index
int64 0
0
| repo_id
stringlengths 9
205
| file_path
stringlengths 31
246
| content
stringlengths 1
12.2M
| __index_level_0__
int64 0
10k
|
---|---|---|---|---|
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/schema/DubboBeanDefinitionParser.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.schema;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.ClassUtils;
import org.apache.dubbo.common.utils.MethodUtils;
import org.apache.dubbo.common.utils.ReflectUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.AbstractServiceConfig;
import org.apache.dubbo.config.ArgumentConfig;
import org.apache.dubbo.config.ConsumerConfig;
import org.apache.dubbo.config.MethodConfig;
import org.apache.dubbo.config.MetricsConfig;
import org.apache.dubbo.config.ProtocolConfig;
import org.apache.dubbo.config.ProviderConfig;
import org.apache.dubbo.config.ReferenceConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.apache.dubbo.config.nested.AggregationConfig;
import org.apache.dubbo.config.nested.HistogramConfig;
import org.apache.dubbo.config.nested.PrometheusConfig;
import org.apache.dubbo.config.spring.Constants;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.ServiceBean;
import org.apache.dubbo.config.spring.reference.ReferenceAttributes;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.config.TypedStringValue;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import static org.apache.dubbo.common.constants.CommonConstants.HIDE_KEY_PREFIX;
import static org.apache.dubbo.config.spring.util.SpringCompatUtils.getPropertyValue;
/**
* AbstractBeanDefinitionParser
*
* @export
*/
public class DubboBeanDefinitionParser implements BeanDefinitionParser {

    // NOTE(review): logger and GROUP_AND_VERSION are declared but not referenced
    // anywhere in this class's visible code — presumably kept for compatibility
    // or used by an earlier revision; confirm before removing.
    private static final Logger logger = LoggerFactory.getLogger(DubboBeanDefinitionParser.class);
    private static final Pattern GROUP_AND_VERSION = Pattern.compile("^[\\-.0-9_a-zA-Z]+(\\:[\\-.0-9_a-zA-Z]+)?$");

    // XML attribute names that receive special handling in parse(): the value
    // "ref.methodName" is split into a bean reference plus a "<attr>Method"
    // property (see the ONRETURN/ONTHROW/ONINVOKE branch below).
    private static final String ONRETURN = "onreturn";
    private static final String ONTHROW = "onthrow";
    private static final String ONINVOKE = "oninvoke";
    private static final String EXECUTOR = "executor";
    // Suffix appended to onreturn/onthrow/oninvoke property names.
    private static final String METHOD = "Method";
    // Key under which the generated Spring bean name is stashed on the definition.
    private static final String BEAN_NAME = "BEAN_NAME";

    // Flipped to false permanently if the running Spring version lacks
    // Environment#resolveRequiredPlaceholders (see resolvePlaceholders below).
    private static boolean resolvePlaceholdersEnabled = true;

    // The Dubbo config class this parser instance produces definitions for.
    private final Class<?> beanClass;

    // Cache of config-class-name -> (bean property name -> property type),
    // built lazily by getPropertyMap().
    // NOTE(review): this is a plain HashMap mutated from parse() without any
    // synchronization; if XML parsing can happen concurrently this is a data
    // race — consider ConcurrentHashMap#computeIfAbsent. TODO confirm the
    // threading model of Spring XML parsing here.
    private static Map<String, Map<String, Class>> beanPropsCache = new HashMap<>();

    /**
     * Creates a parser bound to one Dubbo config class (e.g. ServiceBean,
     * ReferenceBean, RegistryConfig); one instance is registered per XML tag.
     */
    public DubboBeanDefinitionParser(Class<?> beanClass) {
        this.beanClass = beanClass;
    }

    /**
     * Core parsing routine: converts one XML element into a RootBeanDefinition
     * for {@code beanClass}, resolving id/name, mapping attributes onto bean
     * properties, handling nested tags, and (optionally) registering the
     * definition with the registry.
     *
     * @param element       the XML element being parsed
     * @param parserContext Spring parser context (registry access, environment)
     * @param beanClass     target Dubbo config class
     * @param registered    whether to register the definition under the
     *                      resolved bean name (nested method/argument configs
     *                      pass false)
     * @return the populated bean definition
     */
    @SuppressWarnings("unchecked")
    private static RootBeanDefinition parse(
            Element element, ParserContext parserContext, Class<?> beanClass, boolean registered) {
        RootBeanDefinition beanDefinition = new RootBeanDefinition();
        beanDefinition.setBeanClass(beanClass);
        beanDefinition.setLazyInit(false);
        if (ServiceBean.class.equals(beanClass)) {
            beanDefinition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
        }
        // config id
        String configId = resolveAttribute(element, "id", parserContext);
        if (StringUtils.isNotEmpty(configId)) {
            beanDefinition.getPropertyValues().addPropertyValue("id", configId);
        }
        String configName = "";
        // get configName from name
        if (StringUtils.isEmpty(configId)) {
            configName = resolveAttribute(element, "name", parserContext);
        }
        String beanName = configId;
        if (StringUtils.isEmpty(beanName)) {
            // generate bean name of the form "<class>#<name>#<counter>"
            String prefix = beanClass.getName();
            int counter = 0;
            beanName = prefix + (StringUtils.isEmpty(configName) ? "#" : ("#" + configName + "#")) + counter;
            // NOTE(review): post-increment means the first loop iteration
            // rebuilds the same "#0" name that was just found to collide, so
            // one extra pass is wasted before "#1" is tried. Terminates
            // correctly, but (counter++) here looks like it was meant to be
            // (++counter) — confirm against upstream.
            while (parserContext.getRegistry().containsBeanDefinition(beanName)) {
                beanName = prefix + (StringUtils.isEmpty(configName) ? "#" : ("#" + configName + "#")) + (counter++);
            }
        }
        beanDefinition.setAttribute(BEAN_NAME, beanName);
        if (ProtocolConfig.class.equals(beanClass)) {
            //            for (String name : parserContext.getRegistry().getBeanDefinitionNames()) {
            //                BeanDefinition definition = parserContext.getRegistry().getBeanDefinition(name);
            //                PropertyValue property = definition.getPropertyValues().getPropertyValue("protocol");
            //                if (property != null) {
            //                    Object value = property.getValue();
            //                    if (value instanceof ProtocolConfig && beanName.equals(((ProtocolConfig)
            // value).getName())) {
            //                        definition.getPropertyValues().addPropertyValue("protocol", new
            // RuntimeBeanReference(beanName));
            //                    }
            //                }
            //            }
        } else if (ServiceBean.class.equals(beanClass)) {
            // <dubbo:service class="..."> — build an inner bean definition for
            // the implementation class and wire it as the service "ref".
            String className = resolveAttribute(element, "class", parserContext);
            if (StringUtils.isNotEmpty(className)) {
                RootBeanDefinition classDefinition = new RootBeanDefinition();
                classDefinition.setBeanClass(ReflectUtils.forName(className));
                classDefinition.setLazyInit(false);
                parseProperties(element.getChildNodes(), classDefinition, parserContext);
                beanDefinition
                        .getPropertyValues()
                        .addPropertyValue("ref", new BeanDefinitionHolder(classDefinition, beanName + "Impl"));
            }
        }
        // Lazily build (and cache) the settable-property map for this config class.
        Map<String, Class> beanPropTypeMap = beanPropsCache.get(beanClass.getName());
        if (beanPropTypeMap == null) {
            beanPropTypeMap = new HashMap<>();
            beanPropsCache.put(beanClass.getName(), beanPropTypeMap);
            if (ReferenceBean.class.equals(beanClass)) {
                // extract bean props from ReferenceConfig
                getPropertyMap(ReferenceConfig.class, beanPropTypeMap);
            } else {
                getPropertyMap(beanClass, beanPropTypeMap);
            }
        }
        ManagedMap parameters = null;
        // Attribute names consumed as bean properties; anything left over is
        // collected into the generic "parameters" map further below.
        Set<String> processedProps = new HashSet<>();
        for (Map.Entry<String, Class> entry : beanPropTypeMap.entrySet()) {
            String beanProperty = entry.getKey();
            Class type = entry.getValue();
            String property = StringUtils.camelToSplitName(beanProperty, "-");
            processedProps.add(property);
            if ("parameters".equals(property)) {
                parameters = parseParameters(element.getChildNodes(), beanDefinition, parserContext);
            } else if ("methods".equals(property)) {
                parseMethods(beanName, element.getChildNodes(), beanDefinition, parserContext);
            } else if ("arguments".equals(property)) {
                parseArguments(beanName, element.getChildNodes(), beanDefinition, parserContext);
            } else {
                String value = resolveAttribute(element, property, parserContext);
                if (StringUtils.isNotBlank(value)) {
                    value = value.trim();
                    if ("registry".equals(property) && RegistryConfig.NO_AVAILABLE.equalsIgnoreCase(value)) {
                        RegistryConfig registryConfig = new RegistryConfig();
                        registryConfig.setAddress(RegistryConfig.NO_AVAILABLE);
                        // see AbstractInterfaceConfig#registries, It will be invoker setRegistries method when
                        // BeanDefinition is registered,
                        beanDefinition.getPropertyValues().addPropertyValue("registries", registryConfig);
                        // If registry is N/A, don't init it until the reference is invoked
                        beanDefinition.setLazyInit(true);
                    } else if ("provider".equals(property)
                            || "registry".equals(property)
                            || ("protocol".equals(property)
                                    && AbstractServiceConfig.class.isAssignableFrom(beanClass))) {
                        /**
                         * For 'provider' 'protocol' 'registry', keep literal value (should be id/name) and set the value to 'registryIds' 'providerIds' protocolIds'
                         * The following process should make sure each id refers to the corresponding instance, here's how to find the instance for different use cases:
                         * 1. Spring, check existing bean by id, see{@link ServiceBean#afterPropertiesSet()}; then try to use id to find configs defined in remote Config Center
                         * 2. API, directly use id to find configs defined in remote Config Center; if all config instances are defined locally, please use {@link org.apache.dubbo.config.ServiceConfig#setRegistries(List)}
                         */
                        beanDefinition.getPropertyValues().addPropertyValue(beanProperty + "Ids", value);
                    } else {
                        Object reference;
                        if (isPrimitive(type)) {
                            // Drop values that were merely the old xsd defaults.
                            value = getCompatibleDefaultValue(property, value);
                            reference = value;
                        } else if (ONRETURN.equals(property) || ONTHROW.equals(property) || ONINVOKE.equals(property)) {
                            // Value is "beanRef.methodName": split at the last dot.
                            int index = value.lastIndexOf(".");
                            String ref = value.substring(0, index);
                            String method = value.substring(index + 1);
                            reference = new RuntimeBeanReference(ref);
                            beanDefinition.getPropertyValues().addPropertyValue(property + METHOD, method);
                        } else if (EXECUTOR.equals(property)) {
                            reference = new RuntimeBeanReference(value);
                        } else {
                            if ("ref".equals(property)
                                    && parserContext.getRegistry().containsBeanDefinition(value)) {
                                BeanDefinition refBean =
                                        parserContext.getRegistry().getBeanDefinition(value);
                                // An exported service must be a singleton bean.
                                if (!refBean.isSingleton()) {
                                    throw new IllegalStateException(
                                            "The exported service ref " + value + " must be singleton! Please set the "
                                                    + value + " bean scope to singleton, eg: <bean id=\"" + value
                                                    + "\" scope=\"singleton\" ...>");
                                }
                            }
                            reference = new RuntimeBeanReference(value);
                        }
                        if (reference != null) {
                            beanDefinition.getPropertyValues().addPropertyValue(beanProperty, reference);
                        }
                    }
                }
            }
        }
        // Any XML attribute not matched to a bean property above becomes an
        // entry in the generic "parameters" map.
        NamedNodeMap attributes = element.getAttributes();
        int len = attributes.getLength();
        for (int i = 0; i < len; i++) {
            Node node = attributes.item(i);
            String name = node.getLocalName();
            if (!processedProps.contains(name)) {
                if (parameters == null) {
                    parameters = new ManagedMap();
                }
                String value = node.getNodeValue();
                parameters.put(name, new TypedStringValue(value, String.class));
            }
        }
        if (parameters != null) {
            beanDefinition.getPropertyValues().addPropertyValue("parameters", parameters);
        }
        // post-process after parse attributes
        if (ProviderConfig.class.equals(beanClass)) {
            parseNested(
                    element, parserContext, ServiceBean.class, true, "service", "provider", beanName, beanDefinition);
        } else if (ConsumerConfig.class.equals(beanClass)) {
            parseNested(
                    element,
                    parserContext,
                    ReferenceBean.class,
                    true,
                    "reference",
                    "consumer",
                    beanName,
                    beanDefinition);
        } else if (ReferenceBean.class.equals(beanClass)) {
            configReferenceBean(element, parserContext, beanDefinition, null);
        } else if (MetricsConfig.class.equals(beanClass)) {
            parseMetrics(element, parserContext, beanDefinition);
        }
        // register bean definition
        if (parserContext.getRegistry().containsBeanDefinition(beanName)) {
            throw new IllegalStateException("Duplicate spring bean name: " + beanName);
        }
        if (registered) {
            parserContext.getRegistry().registerBeanDefinition(beanName, beanDefinition);
        }
        return beanDefinition;
    }

    /**
     * Handles the nested children of a &lt;dubbo:metrics&gt; element:
     * aggregation, histogram, and the two prometheus sub-configs (which share
     * one PrometheusConfig instance).
     */
    private static void parseMetrics(Element element, ParserContext parserContext, RootBeanDefinition beanDefinition) {
        NodeList childNodes = element.getChildNodes();
        PrometheusConfig prometheus = null;
        for (int i = 0; i < childNodes.getLength(); i++) {
            if (!(childNodes.item(i) instanceof Element)) {
                continue;
            }
            Element child = (Element) childNodes.item(i);
            if ("aggregation".equals(child.getNodeName()) || "aggregation".equals(child.getLocalName())) {
                AggregationConfig aggregation = new AggregationConfig();
                assignProperties(aggregation, child, parserContext);
                beanDefinition.getPropertyValues().addPropertyValue("aggregation", aggregation);
            } else if ("histogram".equals(child.getNodeName()) || "histogram".equals(child.getLocalName())) {
                HistogramConfig histogram = new HistogramConfig();
                assignProperties(histogram, child, parserContext);
                beanDefinition.getPropertyValues().addPropertyValue("histogram", histogram);
            } else if ("prometheus-exporter".equals(child.getNodeName())
                    || "prometheus-exporter".equals(child.getLocalName())) {
                if (prometheus == null) {
                    prometheus = new PrometheusConfig();
                }
                PrometheusConfig.Exporter exporter = new PrometheusConfig.Exporter();
                assignProperties(exporter, child, parserContext);
                prometheus.setExporter(exporter);
            } else if ("prometheus-pushgateway".equals(child.getNodeName())
                    || "prometheus-pushgateway".equals(child.getLocalName())) {
                if (prometheus == null) {
                    prometheus = new PrometheusConfig();
                }
                PrometheusConfig.Pushgateway pushgateway = new PrometheusConfig.Pushgateway();
                assignProperties(pushgateway, child, parserContext);
                prometheus.setPushgateway(pushgateway);
            }
        }
        if (prometheus != null) {
            beanDefinition.getPropertyValues().addPropertyValue("prometheus", prometheus);
        }
    }

    /**
     * Copies XML attributes of {@code ele} onto {@code obj} by matching each
     * public setter (setFooBar -> attribute "foo-bar") and converting the
     * string value to the setter's parameter type.
     */
    private static void assignProperties(Object obj, Element ele, ParserContext parserContext) {
        Method[] methods = obj.getClass().getMethods();
        for (Method method : methods) {
            if (MethodUtils.isSetter(method)) {
                String beanProperty = method.getName().substring(3, 4).toLowerCase()
                        + method.getName().substring(4);
                String property = StringUtils.camelToSplitName(beanProperty, "-");
                String value = resolveAttribute(ele, property, parserContext);
                if (StringUtils.isNotEmpty(value)) {
                    try {
                        Object v = ClassUtils.convertPrimitive(method.getParameterTypes()[0], value);
                        method.invoke(obj, v);
                    } catch (IllegalAccessException | InvocationTargetException e) {
                        throw new IllegalStateException(e);
                    }
                }
            }
        }
    }

    /**
     * Reference-bean-specific post-processing: resolves the interface class
     * (taking "generic" into account, possibly inherited from the consumer
     * definition), decorates the definition so Spring can determine the
     * factory-bean type without instantiating it, and marks all property
     * values optional.
     *
     * @param consumerDefinition enclosing consumer definition, or null when
     *                           the reference is not nested inside a consumer
     */
    private static void configReferenceBean(
            Element element,
            ParserContext parserContext,
            RootBeanDefinition beanDefinition,
            BeanDefinition consumerDefinition) {
        // process interface class
        String interfaceName = resolveAttribute(element, ReferenceAttributes.INTERFACE, parserContext);
        String generic = resolveAttribute(element, ReferenceAttributes.GENERIC, parserContext);
        if (StringUtils.isBlank(generic) && consumerDefinition != null) {
            // get generic from consumerConfig
            generic = getPropertyValue(consumerDefinition.getPropertyValues(), ReferenceAttributes.GENERIC);
        }
        if (generic != null) {
            generic = resolvePlaceholders(generic, parserContext);
            beanDefinition.getPropertyValues().add(ReferenceAttributes.GENERIC, generic);
        }
        beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_NAME, interfaceName);
        Class interfaceClass = ReferenceConfig.determineInterfaceClass(generic, interfaceName);
        beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_CLASS, interfaceClass);
        // TODO Only register one reference bean for same (group, interface, version)
        // create decorated definition for reference bean, Avoid being instantiated when getting the beanType of
        // ReferenceBean
        // see org.springframework.beans.factory.support.AbstractBeanFactory#getTypeForFactoryBean()
        GenericBeanDefinition targetDefinition = new GenericBeanDefinition();
        targetDefinition.setBeanClass(interfaceClass);
        String beanName = (String) beanDefinition.getAttribute(BEAN_NAME);
        beanDefinition.setDecoratedDefinition(new BeanDefinitionHolder(targetDefinition, beanName + "_decorated"));
        // signal object type since Spring 5.2
        beanDefinition.setAttribute(Constants.OBJECT_TYPE_ATTRIBUTE, interfaceClass);
        // mark property value as optional
        List<PropertyValue> propertyValues = beanDefinition.getPropertyValues().getPropertyValueList();
        for (PropertyValue propertyValue : propertyValues) {
            propertyValue.setOptional(true);
        }
    }

    /**
     * Fills {@code beanPropsMap} with (property name -> type) for every public
     * single-argument setter on {@code beanClass} that has a matching public
     * getter/is-getter of the same type.
     */
    private static void getPropertyMap(Class<?> beanClass, Map<String, Class> beanPropsMap) {
        for (Method setter : beanClass.getMethods()) {
            String name = setter.getName();
            if (name.length() > 3
                    && name.startsWith("set")
                    && Modifier.isPublic(setter.getModifiers())
                    && setter.getParameterTypes().length == 1) {
                Class<?> type = setter.getParameterTypes()[0];
                String beanProperty = name.substring(3, 4).toLowerCase() + name.substring(4);
                // check the setter/getter whether match
                Method getter = null;
                try {
                    getter = beanClass.getMethod("get" + name.substring(3), new Class<?>[0]);
                } catch (NoSuchMethodException e) {
                    try {
                        getter = beanClass.getMethod("is" + name.substring(3), new Class<?>[0]);
                    } catch (NoSuchMethodException e2) {
                        // ignore, there is no need any log here since some class implement the interface:
                        // EnvironmentAware,
                        // ApplicationAware, etc. They only have setter method, otherwise will cause the error log
                        // during application start up.
                    }
                }
                if (getter == null
                        || !Modifier.isPublic(getter.getModifiers())
                        || !type.equals(getter.getReturnType())) {
                    continue;
                }
                beanPropsMap.put(beanProperty, type);
            }
        }
    }

    /**
     * Maps values that were merely the default in old xsd versions to null so
     * they are not explicitly set on the config bean.
     */
    private static String getCompatibleDefaultValue(String property, String value) {
        if ("async".equals(property) && "false".equals(value)
                || "timeout".equals(property) && "0".equals(value)
                || "delay".equals(property) && "0".equals(value)
                || "version".equals(property) && "0.0.0".equals(value)
                || "stat".equals(property) && "-1".equals(value)
                || "reliable".equals(property) && "false".equals(value)) {
            // backward compatibility for the default value in old version's xsd
            value = null;
        }
        return value;
    }

    /**
     * Treats primitives, their wrappers, String, Date and Class as "simple"
     * values that are set as literals rather than bean references.
     */
    private static boolean isPrimitive(Class<?> cls) {
        return cls.isPrimitive()
                || cls == Boolean.class
                || cls == Byte.class
                || cls == Character.class
                || cls == Short.class
                || cls == Integer.class
                || cls == Long.class
                || cls == Float.class
                || cls == Double.class
                || cls == String.class
                || cls == Date.class
                || cls == Class.class;
    }

    /**
     * Parses child elements named {@code tag} (e.g. &lt;dubbo:service&gt;
     * under &lt;dubbo:provider&gt;) as beans of {@code beanClass}, wiring each
     * sub-definition's {@code property} back to the enclosing bean {@code ref}.
     * On the first matching child, "default" is forced to "false" on the
     * enclosing definition unless the attribute was set explicitly.
     */
    private static void parseNested(
            Element element,
            ParserContext parserContext,
            Class<?> beanClass,
            boolean registered,
            String tag,
            String property,
            String ref,
            BeanDefinition beanDefinition) {
        NodeList nodeList = element.getChildNodes();
        if (nodeList == null) {
            return;
        }
        boolean first = true;
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (!(node instanceof Element)) {
                continue;
            }
            if (tag.equals(node.getNodeName()) || tag.equals(node.getLocalName())) {
                if (first) {
                    first = false;
                    String isDefault = resolveAttribute(element, "default", parserContext);
                    if (StringUtils.isEmpty(isDefault)) {
                        beanDefinition.getPropertyValues().addPropertyValue("default", "false");
                    }
                }
                RootBeanDefinition subDefinition = parse((Element) node, parserContext, beanClass, registered);
                if (subDefinition != null) {
                    if (StringUtils.isNotEmpty(ref)) {
                        subDefinition.getPropertyValues().addPropertyValue(property, new RuntimeBeanReference(ref));
                    }
                    if (ReferenceBean.class.equals(beanClass)) {
                        configReferenceBean((Element) node, parserContext, subDefinition, beanDefinition);
                    }
                }
            }
        }
    }

    /**
     * Parses &lt;property name="..." value="..."/&gt; /
     * &lt;property name="..." ref="..."/&gt; children onto the given
     * definition; exactly one of value/ref must be present.
     *
     * @throws UnsupportedOperationException when neither value nor ref is set
     */
    private static void parseProperties(
            NodeList nodeList, RootBeanDefinition beanDefinition, ParserContext parserContext) {
        if (nodeList == null) {
            return;
        }
        for (int i = 0; i < nodeList.getLength(); i++) {
            if (!(nodeList.item(i) instanceof Element)) {
                continue;
            }
            Element element = (Element) nodeList.item(i);
            if ("property".equals(element.getNodeName()) || "property".equals(element.getLocalName())) {
                String name = resolveAttribute(element, "name", parserContext);
                if (StringUtils.isNotEmpty(name)) {
                    String value = resolveAttribute(element, "value", parserContext);
                    String ref = resolveAttribute(element, "ref", parserContext);
                    if (StringUtils.isNotEmpty(value)) {
                        beanDefinition.getPropertyValues().addPropertyValue(name, value);
                    } else if (StringUtils.isNotEmpty(ref)) {
                        beanDefinition.getPropertyValues().addPropertyValue(name, new RuntimeBeanReference(ref));
                    } else {
                        throw new UnsupportedOperationException("Unsupported <property name=\"" + name
                                + "\"> sub tag, Only supported <property name=\"" + name
                                + "\" ref=\"...\" /> or <property name=\"" + name + "\" value=\"...\" />");
                    }
                }
            }
        }
    }

    /**
     * Collects &lt;parameter key="..." value="..." [hide="true"]/&gt; children
     * into a ManagedMap; hidden keys are prefixed with HIDE_KEY_PREFIX.
     *
     * @return the map, or null when no parameter child exists
     */
    @SuppressWarnings("unchecked")
    private static ManagedMap parseParameters(
            NodeList nodeList, RootBeanDefinition beanDefinition, ParserContext parserContext) {
        if (nodeList == null) {
            return null;
        }
        ManagedMap parameters = null;
        for (int i = 0; i < nodeList.getLength(); i++) {
            if (!(nodeList.item(i) instanceof Element)) {
                continue;
            }
            Element element = (Element) nodeList.item(i);
            if ("parameter".equals(element.getNodeName()) || "parameter".equals(element.getLocalName())) {
                if (parameters == null) {
                    parameters = new ManagedMap();
                }
                String key = resolveAttribute(element, "key", parserContext);
                String value = resolveAttribute(element, "value", parserContext);
                boolean hide = "true".equals(resolveAttribute(element, "hide", parserContext));
                if (hide) {
                    key = HIDE_KEY_PREFIX + key;
                }
                parameters.put(key, new TypedStringValue(value, String.class));
            }
        }
        return parameters;
    }

    /**
     * Parses &lt;dubbo:method&gt; children into MethodConfig definitions named
     * "&lt;id&gt;.&lt;methodName&gt;" and sets them as the "methods" property.
     *
     * @throws IllegalStateException when a method child has no name attribute
     */
    @SuppressWarnings("unchecked")
    private static void parseMethods(
            String id, NodeList nodeList, RootBeanDefinition beanDefinition, ParserContext parserContext) {
        if (nodeList == null) {
            return;
        }
        ManagedList methods = null;
        for (int i = 0; i < nodeList.getLength(); i++) {
            if (!(nodeList.item(i) instanceof Element)) {
                continue;
            }
            Element element = (Element) nodeList.item(i);
            if ("method".equals(element.getNodeName()) || "method".equals(element.getLocalName())) {
                String methodName = resolveAttribute(element, "name", parserContext);
                if (StringUtils.isEmpty(methodName)) {
                    throw new IllegalStateException("<dubbo:method> name attribute == null");
                }
                if (methods == null) {
                    methods = new ManagedList();
                }
                RootBeanDefinition methodBeanDefinition = parse(element, parserContext, MethodConfig.class, false);
                String beanName = id + "." + methodName;
                // If the PropertyValue named "id" can't be found,
                // bean name will be taken as the "id" PropertyValue for MethodConfig
                if (!hasPropertyValue(methodBeanDefinition, "id")) {
                    addPropertyValue(methodBeanDefinition, "id", beanName);
                }
                BeanDefinitionHolder methodBeanDefinitionHolder =
                        new BeanDefinitionHolder(methodBeanDefinition, beanName);
                methods.add(methodBeanDefinitionHolder);
            }
        }
        if (methods != null) {
            beanDefinition.getPropertyValues().addPropertyValue("methods", methods);
        }
    }

    /** Whether the definition already carries a property value of that name. */
    private static boolean hasPropertyValue(AbstractBeanDefinition beanDefinition, String propertyName) {
        return beanDefinition.getPropertyValues().contains(propertyName);
    }

    /** Adds a property value, silently skipping blank names or values. */
    private static void addPropertyValue(
            AbstractBeanDefinition beanDefinition, String propertyName, String propertyValue) {
        if (StringUtils.isBlank(propertyName) || StringUtils.isBlank(propertyValue)) {
            return;
        }
        beanDefinition.getPropertyValues().addPropertyValue(propertyName, propertyValue);
    }

    /**
     * Parses &lt;argument index="..."/&gt; children into ArgumentConfig
     * definitions named "&lt;id&gt;.&lt;index&gt;" and sets them as the
     * "arguments" property.
     */
    @SuppressWarnings("unchecked")
    private static void parseArguments(
            String id, NodeList nodeList, RootBeanDefinition beanDefinition, ParserContext parserContext) {
        if (nodeList == null) {
            return;
        }
        ManagedList arguments = null;
        for (int i = 0; i < nodeList.getLength(); i++) {
            if (!(nodeList.item(i) instanceof Element)) {
                continue;
            }
            Element element = (Element) nodeList.item(i);
            if ("argument".equals(element.getNodeName()) || "argument".equals(element.getLocalName())) {
                String argumentIndex = resolveAttribute(element, "index", parserContext);
                if (arguments == null) {
                    arguments = new ManagedList();
                }
                BeanDefinition argumentBeanDefinition = parse(element, parserContext, ArgumentConfig.class, false);
                String name = id + "." + argumentIndex;
                BeanDefinitionHolder argumentBeanDefinitionHolder =
                        new BeanDefinitionHolder(argumentBeanDefinition, name);
                arguments.add(argumentBeanDefinitionHolder);
            }
        }
        if (arguments != null) {
            beanDefinition.getPropertyValues().addPropertyValue("arguments", arguments);
        }
    }

    /** BeanDefinitionParser entry point: parse and register the definition. */
    @Override
    public BeanDefinition parse(Element element, ParserContext parserContext) {
        return parse(element, parserContext, beanClass, true);
    }

    /**
     * Reads an XML attribute verbatim. Placeholder resolution is deliberately
     * NOT performed here because it would run before
     * PropertySourcesPlaceholderConfigurer is in effect.
     */
    private static String resolveAttribute(Element element, String attributeName, ParserContext parserContext) {
        String attributeValue = element.getAttribute(attributeName);
        // Early resolve place holder may be wrong ( Before
        // PropertySourcesPlaceholderConfigurer/PropertyPlaceholderConfigurer )
        // https://github.com/apache/dubbo/pull/6079
        // https://github.com/apache/dubbo/issues/6035
        //        Environment environment = parserContext.getReaderContext().getEnvironment();
        //        return environment.resolvePlaceholders(attributeValue);
        return attributeValue;
    }

    /**
     * Resolves ${...} placeholders via the Environment; on old Spring versions
     * that lack resolveRequiredPlaceholders, disables resolution for good and
     * returns the input unchanged.
     */
    private static String resolvePlaceholders(String str, ParserContext parserContext) {
        if (resolvePlaceholdersEnabled) {
            try {
                return parserContext.getReaderContext().getEnvironment().resolveRequiredPlaceholders(str);
            } catch (NoSuchMethodError e) {
                resolvePlaceholdersEnabled = false;
            }
        }
        return str;
    }
}
| 8,800 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/schema/DubboNamespaceHandler.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.schema;
import org.apache.dubbo.config.ApplicationConfig;
import org.apache.dubbo.config.ConsumerConfig;
import org.apache.dubbo.config.MetadataReportConfig;
import org.apache.dubbo.config.MetricsConfig;
import org.apache.dubbo.config.ModuleConfig;
import org.apache.dubbo.config.MonitorConfig;
import org.apache.dubbo.config.ProtocolConfig;
import org.apache.dubbo.config.ProviderConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.apache.dubbo.config.SslConfig;
import org.apache.dubbo.config.TracingConfig;
import org.apache.dubbo.config.spring.ConfigCenterBean;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.ServiceBean;
import org.apache.dubbo.config.spring.beans.factory.config.ConfigurableSourceBeanMetadataElement;
import org.apache.dubbo.config.spring.context.DubboSpringInitializer;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.context.annotation.AnnotationConfigUtils;
import org.w3c.dom.Element;
/**
* DubboNamespaceHandler
*
* @export
*/
public class DubboNamespaceHandler extends NamespaceHandlerSupport implements ConfigurableSourceBeanMetadataElement {

    @Override
    public void init() {
        // Every <dubbo:xxx/> tag maps to one Dubbo config class via a shared
        // DubboBeanDefinitionParser; registration order mirrors the schema.
        registerConfigParser("application", ApplicationConfig.class);
        registerConfigParser("module", ModuleConfig.class);
        registerConfigParser("registry", RegistryConfig.class);
        registerConfigParser("config-center", ConfigCenterBean.class);
        registerConfigParser("metadata-report", MetadataReportConfig.class);
        registerConfigParser("monitor", MonitorConfig.class);
        registerConfigParser("metrics", MetricsConfig.class);
        registerConfigParser("tracing", TracingConfig.class);
        registerConfigParser("ssl", SslConfig.class);
        registerConfigParser("provider", ProviderConfig.class);
        registerConfigParser("consumer", ConsumerConfig.class);
        registerConfigParser("protocol", ProtocolConfig.class);
        registerConfigParser("service", ServiceBean.class);
        registerConfigParser("reference", ReferenceBean.class);
        // <dubbo:annotation/> has its own dedicated parser.
        registerBeanDefinitionParser("annotation", new AnnotationBeanDefinitionParser());
    }

    /**
     * Binds one XML tag to a {@link DubboBeanDefinitionParser} for the given
     * Dubbo config class.
     */
    private void registerConfigParser(String tag, Class<?> configClass) {
        registerBeanDefinitionParser(tag, new DubboBeanDefinitionParser(configClass));
    }

    /**
     * Override {@link NamespaceHandlerSupport#parse(Element, ParserContext)}:
     * ensures Spring's annotation-config processors and the Dubbo spring
     * context are initialized before delegating, then tags the resulting
     * definition with its source.
     *
     * @param element       {@link Element} being parsed
     * @param parserContext {@link ParserContext}
     * @return the parsed bean definition
     * @since 2.7.5
     */
    @Override
    public BeanDefinition parse(Element element, ParserContext parserContext) {
        BeanDefinitionRegistry registry = parserContext.getRegistry();
        registerAnnotationConfigProcessors(registry);
        // initialize dubbo beans
        DubboSpringInitializer.initialize(parserContext.getRegistry());
        BeanDefinition definition = super.parse(element, parserContext);
        setSource(definition);
        return definition;
    }

    /**
     * Register the processors for the Spring Annotation-Driven features.
     *
     * @param registry {@link BeanDefinitionRegistry}
     * @see AnnotationConfigUtils
     * @since 2.7.5
     */
    private void registerAnnotationConfigProcessors(BeanDefinitionRegistry registry) {
        AnnotationConfigUtils.registerAnnotationConfigProcessors(registry);
    }
}
| 8,801 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/config/DubboConfigDefaultPropertyValueBeanPostProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.config;
import org.apache.dubbo.config.AbstractConfig;
import org.apache.dubbo.config.Constants;
import org.apache.dubbo.config.spring.util.GenericBeanPostProcessorAdapter;
import org.apache.dubbo.config.spring.util.ObjectUtils;
import javax.annotation.PostConstruct;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.support.MergedBeanDefinitionPostProcessor;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.annotation.CommonAnnotationBeanPostProcessor;
import org.springframework.core.Ordered;
import org.springframework.core.PriorityOrdered;
import static org.springframework.aop.support.AopUtils.getTargetClass;
import static org.springframework.beans.BeanUtils.getPropertyDescriptor;
import static org.springframework.util.ReflectionUtils.invokeMethod;
/**
* The {@link BeanPostProcessor} class for the default property value of {@link AbstractConfig Dubbo's Config Beans}
*
* @since 2.7.6
*/
public class DubboConfigDefaultPropertyValueBeanPostProcessor extends GenericBeanPostProcessorAdapter<AbstractConfig>
        implements MergedBeanDefinitionPostProcessor, PriorityOrdered {

    /**
     * The bean name of {@link DubboConfigDefaultPropertyValueBeanPostProcessor}
     */
    public static final String BEAN_NAME = "dubboConfigDefaultPropertyValueBeanPostProcessor";

    @Override
    protected void processBeforeInitialization(AbstractConfig dubboConfigBean, String beanName) throws BeansException {
        // Auto-generated bean names contain '#'; never use those as the config id.
        if (beanName.contains("#")) {
            return;
        }
        // [Feature] https://github.com/apache/dubbo/issues/5721
        setPropertyIfAbsent(dubboConfigBean, Constants.ID, beanName);
        // beanName should not be used as config name, fix https://github.com/apache/dubbo/pull/7624
        // setPropertyIfAbsent(dubboConfigBean, "name", beanName);
    }

    @Override
    public void postProcessMergedBeanDefinition(RootBeanDefinition beanDefinition, Class<?> beanType, String beanName) {
        // DO NOTHING
    }

    /**
     * Defaults the named String property of the bean to {@code beanName},
     * but only when the property exists, is readable, is currently null,
     * and has a setter taking a single String argument.
     *
     * @param bean         the Dubbo config bean (possibly an AOP proxy)
     * @param propertyName the JavaBean property to default
     * @param beanName     the value to assign when the property is absent
     */
    protected void setPropertyIfAbsent(Object bean, String propertyName, String beanName) {
        PropertyDescriptor descriptor = getPropertyDescriptor(getTargetClass(bean), propertyName);
        if (descriptor == null) { // the bean class has no such property
            return;
        }
        Method getter = descriptor.getReadMethod();
        if (getter == null) { // not readable, cannot test for absence
            return;
        }
        if (invokeMethod(getter, bean) != null) { // the property already carries a value
            return;
        }
        Method setter = descriptor.getWriteMethod();
        if (setter == null) { // not writable
            return;
        }
        // Only default the property when the setter takes exactly one String parameter.
        if (Arrays.equals(ObjectUtils.of(String.class), setter.getParameterTypes())) {
            invokeMethod(setter, bean, beanName);
        }
    }

    /**
     * @return Higher than {@link InitDestroyAnnotationBeanPostProcessor#getOrder()}
     * @see InitDestroyAnnotationBeanPostProcessor
     * @see CommonAnnotationBeanPostProcessor
     * @see PostConstruct
     */
    @Override
    public int getOrder() {
        // NOTE(review): LOWEST_PRECEDENCE is Integer.MAX_VALUE, so "+ 1" wraps to
        // Integer.MIN_VALUE (i.e. highest precedence). Looks deliberate per the javadoc
        // ("higher than InitDestroyAnnotationBeanPostProcessor"), but confirm.
        return Ordered.LOWEST_PRECEDENCE + 1;
    }
}
| 8,802 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/config/ConfigurableSourceBeanMetadataElement.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.config;
import org.springframework.beans.BeanMetadataAttributeAccessor;
import org.springframework.beans.BeanMetadataElement;
/**
* Configurable the {@link BeanMetadataAttributeAccessor#setSource(Object) source} for {@link BeanMetadataElement}
*
* @since 2.7.5
*/
public interface ConfigurableSourceBeanMetadataElement {

    /**
     * Sets this instance as the {@link BeanMetadataAttributeAccessor#setSource(Object) source}
     * of the given element, when the element supports source attribution.
     *
     * @param beanMetadataElement {@link BeanMetadataElement} instance
     */
    default void setSource(BeanMetadataElement beanMetadataElement) {
        if (!(beanMetadataElement instanceof BeanMetadataAttributeAccessor)) {
            return; // element does not expose a settable source
        }
        BeanMetadataAttributeAccessor accessor = (BeanMetadataAttributeAccessor) beanMetadataElement;
        accessor.setSource(this);
    }
}
| 8,803 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/DubboConfigAliasPostProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.config.AbstractConfig;
import org.apache.dubbo.config.spring.context.annotation.DubboConfigConfigurationRegistrar;
import org.apache.dubbo.config.spring.util.BeanRegistrar;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import static org.springframework.util.ObjectUtils.nullSafeEquals;
import static org.springframework.util.StringUtils.hasText;
/**
* A Post-Processor class to set the alias of Dubbo Config bean using its {@link AbstractConfig#getId()}
*
* @since 2.7.5
*/
public class DubboConfigAliasPostProcessor implements BeanDefinitionRegistryPostProcessor, BeanPostProcessor {

    /**
     * The bean name of {@link DubboConfigAliasPostProcessor}
     */
    public static final String BEAN_NAME = "dubboConfigAliasPostProcessor";

    // Captured during the registry phase; used after initialization to register aliases.
    private BeanDefinitionRegistry registry;

    @Override
    public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
        this.registry = registry;
    }

    @Override
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        // DO NOTHING
    }

    @Override
    public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
        // DO NOTHING
        return bean;
    }

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
        if (bean instanceof AbstractConfig) {
            String id = ((AbstractConfig) bean).getId();
            boolean aliasRequired = hasText(id) // id MUST be present in AbstractConfig
                    && !nullSafeEquals(id, beanName) // id MUST NOT be equal to bean name
                    && !BeanRegistrar.hasAlias(registry, beanName, id); // id MUST NOT be present in AliasRegistry
            if (aliasRequired) {
                registry.registerAlias(beanName, id);
            }
        }
        return bean;
    }
}
| 8,804 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/AnnotationPropertyValuesAdapter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import java.lang.annotation.Annotation;
import java.util.Map;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.PropertyValues;
import org.springframework.core.env.PropertyResolver;
/**
* {@link Annotation} {@link PropertyValues} Adapter
*
* @see Annotation
* @see PropertyValues
* @since 2.5.11
*/
public class AnnotationPropertyValuesAdapter implements PropertyValues {

    // Every PropertyValues operation is forwarded to this snapshot,
    // built once from the annotation's attributes at construction time.
    private final PropertyValues delegate;

    /**
     * Builds the adapter from a pre-extracted attribute map.
     *
     * @param attributes           annotation attribute name to value map
     * @param propertyResolver     resolves placeholders inside attribute values
     * @param ignoreAttributeNames attribute names to leave out of the property values
     * @since 2.7.3
     */
    public AnnotationPropertyValuesAdapter(
            Map<String, Object> attributes, PropertyResolver propertyResolver, String... ignoreAttributeNames) {
        this.delegate = new MutablePropertyValues(
                AnnotationUtils.getAttributes(attributes, propertyResolver, ignoreAttributeNames));
    }

    /**
     * Builds the adapter directly from an annotation instance.
     *
     * @param annotation           the source annotation
     * @param propertyResolver     resolves placeholders inside attribute values
     * @param ignoreDefaultValue   whether attributes still at their default value are dropped
     * @param ignoreAttributeNames attribute names to leave out of the property values
     */
    public AnnotationPropertyValuesAdapter(
            Annotation annotation,
            PropertyResolver propertyResolver,
            boolean ignoreDefaultValue,
            String... ignoreAttributeNames) {
        this.delegate = new MutablePropertyValues(
                AnnotationUtils.getAttributes(annotation, propertyResolver, ignoreDefaultValue, ignoreAttributeNames));
    }

    /**
     * Convenience constructor that drops attributes still at their default value.
     */
    public AnnotationPropertyValuesAdapter(
            Annotation annotation, PropertyResolver propertyResolver, String... ignoreAttributeNames) {
        this(annotation, propertyResolver, true, ignoreAttributeNames);
    }

    @Override
    public PropertyValue[] getPropertyValues() {
        return delegate.getPropertyValues();
    }

    @Override
    public PropertyValue getPropertyValue(String propertyName) {
        return delegate.getPropertyValue(propertyName);
    }

    @Override
    public PropertyValues changesSince(PropertyValues old) {
        return delegate.changesSince(old);
    }

    @Override
    public boolean contains(String propertyName) {
        return delegate.contains(propertyName);
    }

    @Override
    public boolean isEmpty() {
        return delegate.isEmpty();
    }
}
| 8,805 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ServiceAnnotationPostProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.common.compact.Dubbo2CompactUtils;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.AnnotationUtils;
import org.apache.dubbo.common.utils.ClassUtils;
import org.apache.dubbo.common.utils.ReflectUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.Constants;
import org.apache.dubbo.config.MethodConfig;
import org.apache.dubbo.config.annotation.DubboService;
import org.apache.dubbo.config.annotation.Method;
import org.apache.dubbo.config.annotation.Service;
import org.apache.dubbo.config.spring.ServiceBean;
import org.apache.dubbo.config.spring.context.annotation.DubboClassPathBeanDefinitionScanner;
import org.apache.dubbo.config.spring.schema.AnnotationBeanDefinitionParser;
import org.apache.dubbo.config.spring.util.DubboAnnotationUtils;
import org.apache.dubbo.config.spring.util.ObjectUtils;
import org.apache.dubbo.config.spring.util.SpringCompatUtils;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.springframework.beans.BeansException;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanDefinitionStoreException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.SingletonBeanRegistry;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.beans.factory.support.BeanNameGenerator;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.context.annotation.AnnotationBeanNameGenerator;
import org.springframework.context.annotation.AnnotationConfigUtils;
import org.springframework.context.annotation.ClassPathBeanDefinitionScanner;
import org.springframework.context.annotation.ConfigurationClassPostProcessor;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.type.MethodMetadata;
import org.springframework.core.type.classreading.MetadataReader;
import org.springframework.core.type.classreading.MetadataReaderFactory;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.core.type.filter.TypeFilter;
import org.springframework.util.CollectionUtils;
import static java.util.Arrays.asList;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_DUPLICATED_BEAN_DEFINITION;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_NO_ANNOTATIONS_FOUND;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_NO_BEANS_SCANNED;
import static org.apache.dubbo.common.utils.AnnotationUtils.filterDefaultValues;
import static org.apache.dubbo.config.spring.beans.factory.annotation.ServiceBeanNameBuilder.create;
import static org.apache.dubbo.config.spring.util.DubboAnnotationUtils.resolveInterfaceName;
import static org.springframework.beans.factory.support.BeanDefinitionBuilder.rootBeanDefinition;
import static org.springframework.context.annotation.AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR;
import static org.springframework.util.ClassUtils.resolveClassName;
/**
* A {@link BeanFactoryPostProcessor} used for processing of {@link Service @Service} annotated classes and annotated bean in java config classes.
* It's also the infrastructure class of XML {@link BeanDefinitionParser} on <dubbo:annotation />
*
*
* @see AnnotationBeanDefinitionParser
* @see BeanDefinitionRegistryPostProcessor
* @since 2.7.7
*/
public class ServiceAnnotationPostProcessor
implements BeanDefinitionRegistryPostProcessor,
EnvironmentAware,
ResourceLoaderAware,
BeanClassLoaderAware,
ApplicationContextAware,
InitializingBean {
    public static final String BEAN_NAME = "dubboServiceAnnotationPostProcessor";

    // All annotation types that mark a class as a Dubbo service; resolved once at class load.
    private static final List<Class<? extends Annotation>> serviceAnnotationTypes = loadServiceAnnotationTypes();

    /**
     * Resolves the supported Dubbo service annotation types, including the legacy
     * com.alibaba annotation only when Dubbo 2 compatibility is enabled and the class is loadable.
     */
    private static List<Class<? extends Annotation>> loadServiceAnnotationTypes() {
        if (Dubbo2CompactUtils.isEnabled() && Dubbo2CompactUtils.isServiceClassLoaded()) {
            return asList(
                    // @since 2.7.7 Add the @DubboService , the issue : https://github.com/apache/dubbo/issues/6007
                    DubboService.class,
                    // @since 2.7.0 the substitute @com.alibaba.dubbo.config.annotation.Service
                    Service.class,
                    // @since 2.7.3 Add the compatibility for legacy Dubbo's @Service , the issue :
                    // https://github.com/apache/dubbo/issues/4330
                    Dubbo2CompactUtils.getServiceClass());
        } else {
            return asList(
                    // @since 2.7.7 Add the @DubboService , the issue : https://github.com/apache/dubbo/issues/6007
                    DubboService.class,
                    // @since 2.7.0 the substitute @com.alibaba.dubbo.config.annotation.Service
                    Service.class);
        }
    }

    private final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());

    // Raw packages as configured (may contain placeholders and surrounding whitespace).
    protected final Set<String> packagesToScan;

    // Packages after placeholder resolution; populated in afterPropertiesSet().
    private Set<String> resolvedPackagesToScan;

    private Environment environment;

    private ResourceLoader resourceLoader;

    private ClassLoader classLoader;

    private BeanDefinitionRegistry registry;

    // Tracks already-scanned packages/classes to avoid duplicated scans.
    // NOTE(review): not assigned anywhere in this chunk — presumably injected elsewhere; verify.
    private ServicePackagesHolder servicePackagesHolder;

    // Set once scanServiceBeans() has run, so postProcessBeanFactory() does not scan twice.
    private volatile boolean scanned = false;
    /**
     * @param packagesToScan base packages to scan for Dubbo service annotations
     */
    public ServiceAnnotationPostProcessor(String... packagesToScan) {
        this(asList(packagesToScan));
    }

    /**
     * @param packagesToScan base packages to scan; copied into an insertion-ordered set
     */
    public ServiceAnnotationPostProcessor(Collection<String> packagesToScan) {
        this(new LinkedHashSet<>(packagesToScan));
    }

    /**
     * @param packagesToScan base packages to scan; retained as-is (not copied)
     */
    public ServiceAnnotationPostProcessor(Set<String> packagesToScan) {
        this.packagesToScan = packagesToScan;
    }
    @Override
    public void afterPropertiesSet() throws Exception {
        // Resolve ${...} placeholders in the configured package names before any scan runs.
        this.resolvedPackagesToScan = resolvePackagesToScan(packagesToScan);
    }
    @Override
    public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
        // Keep the registry for later phases, then scan and register ServiceBean definitions.
        this.registry = registry;
        scanServiceBeans(resolvedPackagesToScan, registry);
    }
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
if (this.registry == null) {
// In spring 3.x, may be not call postProcessBeanDefinitionRegistry()
this.registry = (BeanDefinitionRegistry) beanFactory;
}
// scan bean definitions
String[] beanNames = beanFactory.getBeanDefinitionNames();
for (String beanName : beanNames) {
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanName);
Map<String, Object> annotationAttributes = getServiceAnnotationAttributes(beanDefinition);
if (annotationAttributes != null) {
// process @DubboService at java-config @bean method
processAnnotatedBeanDefinition(
beanName, (AnnotatedBeanDefinition) beanDefinition, annotationAttributes);
}
}
if (!scanned) {
// In spring 3.x, may be not call postProcessBeanDefinitionRegistry(), so scan service class here
scanServiceBeans(resolvedPackagesToScan, registry);
}
}
    /**
     * Scan and registers service beans whose classes was annotated {@link Service}
     *
     * @param packagesToScan The base packages to scan
     * @param registry       {@link BeanDefinitionRegistry}
     */
    private void scanServiceBeans(Set<String> packagesToScan, BeanDefinitionRegistry registry) {
        // Mark as scanned even when nothing is found, so postProcessBeanFactory() does not re-scan.
        scanned = true;
        if (CollectionUtils.isEmpty(packagesToScan)) {
            if (logger.isWarnEnabled()) {
                logger.warn(
                        CONFIG_NO_BEANS_SCANNED,
                        "",
                        "",
                        "packagesToScan is empty , ServiceBean registry will be ignored!");
            }
            return;
        }
        DubboClassPathBeanDefinitionScanner scanner =
                new DubboClassPathBeanDefinitionScanner(registry, environment, resourceLoader);
        BeanNameGenerator beanNameGenerator = resolveBeanNameGenerator(registry);
        scanner.setBeanNameGenerator(beanNameGenerator);
        // Accept any class carrying one of the supported Dubbo service annotations.
        for (Class<? extends Annotation> annotationType : serviceAnnotationTypes) {
            scanner.addIncludeFilter(new AnnotationTypeFilter(annotationType));
        }
        // Exclude classes already handled by a previous scan (also counts what was skipped).
        ScanExcludeFilter scanExcludeFilter = new ScanExcludeFilter();
        scanner.addExcludeFilter(scanExcludeFilter);
        for (String packageToScan : packagesToScan) {
            // avoid duplicated scans
            if (servicePackagesHolder.isPackageScanned(packageToScan)) {
                if (logger.isInfoEnabled()) {
                    logger.info("Ignore package who has already bean scanned: " + packageToScan);
                }
                continue;
            }
            // Registers @Service Bean first
            scanner.scan(packageToScan);
            // Finds all BeanDefinitionHolders of @Service whether @ComponentScan scans or not.
            Set<BeanDefinitionHolder> beanDefinitionHolders =
                    findServiceBeanDefinitionHolders(scanner, packageToScan, registry, beanNameGenerator);
            if (!CollectionUtils.isEmpty(beanDefinitionHolders)) {
                if (logger.isInfoEnabled()) {
                    List<String> serviceClasses = new ArrayList<>(beanDefinitionHolders.size());
                    for (BeanDefinitionHolder beanDefinitionHolder : beanDefinitionHolders) {
                        serviceClasses.add(
                                beanDefinitionHolder.getBeanDefinition().getBeanClassName());
                    }
                    logger.info("Found " + beanDefinitionHolders.size()
                            + " classes annotated by Dubbo @Service under package [" + packageToScan + "]: "
                            + serviceClasses);
                }
                // Register one ServiceBean definition per annotated class, and remember the class.
                for (BeanDefinitionHolder beanDefinitionHolder : beanDefinitionHolders) {
                    processScannedBeanDefinition(beanDefinitionHolder);
                    servicePackagesHolder.addScannedClass(
                            beanDefinitionHolder.getBeanDefinition().getBeanClassName());
                }
            } else {
                if (logger.isWarnEnabled()) {
                    logger.warn(
                            CONFIG_NO_ANNOTATIONS_FOUND,
                            "No annotations were found on the class",
                            "",
                            "No class annotated by Dubbo @DubboService or @Service was found under package ["
                                    + packageToScan + "], ignore re-scanned classes: "
                                    + scanExcludeFilter.getExcludedCount());
                }
            }
            servicePackagesHolder.addScannedPackage(packageToScan);
        }
    }
/**
* It'd be better to use BeanNameGenerator instance that should reference
* {@link ConfigurationClassPostProcessor#componentScanBeanNameGenerator},
* thus it maybe a potential problem on bean name generation.
*
* @param registry {@link BeanDefinitionRegistry}
* @return {@link BeanNameGenerator} instance
* @see SingletonBeanRegistry
* @see AnnotationConfigUtils#CONFIGURATION_BEAN_NAME_GENERATOR
* @see ConfigurationClassPostProcessor#processConfigBeanDefinitions
* @since 2.5.8
*/
private BeanNameGenerator resolveBeanNameGenerator(BeanDefinitionRegistry registry) {
BeanNameGenerator beanNameGenerator = null;
if (registry instanceof SingletonBeanRegistry) {
SingletonBeanRegistry singletonBeanRegistry = SingletonBeanRegistry.class.cast(registry);
beanNameGenerator =
(BeanNameGenerator) singletonBeanRegistry.getSingleton(CONFIGURATION_BEAN_NAME_GENERATOR);
}
if (beanNameGenerator == null) {
if (logger.isInfoEnabled()) {
logger.info("BeanNameGenerator bean can't be found in BeanFactory with name ["
+ CONFIGURATION_BEAN_NAME_GENERATOR + "]");
logger.info("BeanNameGenerator will be a instance of " + AnnotationBeanNameGenerator.class.getName()
+ " , it maybe a potential problem on bean name generation.");
}
beanNameGenerator = new AnnotationBeanNameGenerator();
}
return beanNameGenerator;
}
/**
* Finds a {@link Set} of {@link BeanDefinitionHolder BeanDefinitionHolders} whose bean type annotated
* {@link Service} Annotation.
*
* @param scanner {@link ClassPathBeanDefinitionScanner}
* @param packageToScan pachage to scan
* @param registry {@link BeanDefinitionRegistry}
* @return non-null
* @since 2.5.8
*/
private Set<BeanDefinitionHolder> findServiceBeanDefinitionHolders(
ClassPathBeanDefinitionScanner scanner,
String packageToScan,
BeanDefinitionRegistry registry,
BeanNameGenerator beanNameGenerator) {
Set<BeanDefinition> beanDefinitions = scanner.findCandidateComponents(packageToScan);
Set<BeanDefinitionHolder> beanDefinitionHolders = new LinkedHashSet<>(beanDefinitions.size());
for (BeanDefinition beanDefinition : beanDefinitions) {
String beanName = beanNameGenerator.generateBeanName(beanDefinition, registry);
BeanDefinitionHolder beanDefinitionHolder = new BeanDefinitionHolder(beanDefinition, beanName);
beanDefinitionHolders.add(beanDefinitionHolder);
}
return beanDefinitionHolders;
}
    /**
     * Registers {@link ServiceBean} from new annotated {@link Service} {@link BeanDefinition}
     *
     * @param beanDefinitionHolder holder of the scanned, service-annotated bean definition
     * @see ServiceBean
     * @see BeanDefinition
     */
    private void processScannedBeanDefinition(BeanDefinitionHolder beanDefinitionHolder) {
        Class<?> beanClass = resolveClass(beanDefinitionHolder);
        Annotation service = findServiceAnnotation(beanClass);
        // The attributes of @Service annotation
        Map<String, Object> serviceAnnotationAttributes = AnnotationUtils.getAttributes(service, true);
        String serviceInterface = resolveInterfaceName(serviceAnnotationAttributes, beanClass);
        String annotatedServiceBeanName = beanDefinitionHolder.getBeanName();
        // ServiceBean Bean name
        String beanName = generateServiceBeanName(serviceAnnotationAttributes, serviceInterface);
        // Build a ServiceBean definition referencing the annotated bean, then register it.
        AbstractBeanDefinition serviceBeanDefinition =
                buildServiceBeanDefinition(serviceAnnotationAttributes, serviceInterface, annotatedServiceBeanName);
        registerServiceBeanDefinition(beanName, serviceBeanDefinition, serviceInterface);
    }
/**
* Find the {@link Annotation annotation} of @Service
*
* @param beanClass the {@link Class class} of Bean
* @return <code>null</code> if not found
* @since 2.7.3
*/
private Annotation findServiceAnnotation(Class<?> beanClass) {
return serviceAnnotationTypes.stream()
.map(annotationType -> ClassUtils.isPresent(
"org.springframework.core.annotation.AnnotatedElementUtils",
Thread.currentThread().getContextClassLoader())
&& ReflectUtils.hasMethod(
org.springframework.core.annotation.AnnotatedElementUtils.class,
"findMergedAnnotation")
? org.springframework.core.annotation.AnnotatedElementUtils.findMergedAnnotation(
beanClass, annotationType)
: org.apache.dubbo.common.utils.AnnotationUtils.findAnnotation(beanClass, annotationType))
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
}
    /**
     * Generates the bean name of {@link ServiceBean}
     *
     * @param serviceAnnotationAttributes attributes of the service annotation; "group" and
     *                                    "version" participate in the generated name
     * @param serviceInterface the class of interface annotated {@link Service}
     * @return ServiceBean@interfaceClassName#annotatedServiceBeanName
     * @since 2.7.3
     */
    private String generateServiceBeanName(Map<String, Object> serviceAnnotationAttributes, String serviceInterface) {
        ServiceBeanNameBuilder builder = create(serviceInterface, environment)
                .group((String) serviceAnnotationAttributes.get("group"))
                .version((String) serviceAnnotationAttributes.get("version"));
        return builder.build();
    }
    // Resolves the bean class of the held definition via the configured class loader.
    private Class<?> resolveClass(BeanDefinitionHolder beanDefinitionHolder) {
        BeanDefinition beanDefinition = beanDefinitionHolder.getBeanDefinition();
        return resolveClass(beanDefinition);
    }

    // Loads the definition's bean class by name using this post-processor's class loader.
    private Class<?> resolveClass(BeanDefinition beanDefinition) {
        String beanClassName = beanDefinition.getBeanClassName();
        return resolveClassName(beanClassName, classLoader);
    }
private Set<String> resolvePackagesToScan(Set<String> packagesToScan) {
Set<String> resolvedPackagesToScan = new LinkedHashSet<>(packagesToScan.size());
for (String packageToScan : packagesToScan) {
if (StringUtils.hasText(packageToScan)) {
String resolvedPackageToScan = environment.resolvePlaceholders(packageToScan.trim());
resolvedPackagesToScan.add(resolvedPackageToScan);
}
}
return resolvedPackagesToScan;
}
    /**
     * Build the {@link AbstractBeanDefinition Bean Definition} of {@link ServiceBean}
     * from the service annotation's attributes.
     *
     * @param serviceAnnotationAttributes attributes of the @Service/@DubboService annotation
     * @param serviceInterface            fully-qualified name of the exported interface
     * @param refServiceBeanName          bean name of the annotated implementation ("ref")
     * @return the ServiceBean definition, ready to be registered
     * @since 2.7.3
     */
    private AbstractBeanDefinition buildServiceBeanDefinition(
            Map<String, Object> serviceAnnotationAttributes, String serviceInterface, String refServiceBeanName) {
        BeanDefinitionBuilder builder = rootBeanDefinition(ServiceBean.class);
        AbstractBeanDefinition beanDefinition = builder.getBeanDefinition();
        beanDefinition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
        MutablePropertyValues propertyValues = beanDefinition.getPropertyValues();
        // Attributes handled explicitly below (or intentionally dropped) are excluded
        // from the bulk annotation-to-property copy.
        String[] ignoreAttributeNames = ObjectUtils.of(
                "provider",
                "monitor",
                "application",
                "module",
                "registry",
                "protocol",
                "methods",
                "interfaceName",
                "parameters",
                "executor");
        propertyValues.addPropertyValues(
                new AnnotationPropertyValuesAdapter(serviceAnnotationAttributes, environment, ignoreAttributeNames));
        // set config id, for ConfigManager cache key
        // builder.addPropertyValue("id", beanName);
        // References "ref" property to annotated-@Service Bean
        addPropertyReference(builder, "ref", refServiceBeanName);
        // Set interface
        builder.addPropertyValue("interface", serviceInterface);
        // Convert parameters into map
        builder.addPropertyValue("parameters", DubboAnnotationUtils.convertParameters((String[])
                serviceAnnotationAttributes.get("parameters")));
        // Add methods parameters
        List<MethodConfig> methodConfigs = convertMethodConfigs(serviceAnnotationAttributes.get("methods"));
        if (!methodConfigs.isEmpty()) {
            builder.addPropertyValue("methods", methodConfigs);
        }
        // convert provider to providerIds
        String providerConfigId = (String) serviceAnnotationAttributes.get("provider");
        if (StringUtils.hasText(providerConfigId)) {
            addPropertyValue(builder, "providerIds", providerConfigId);
        }
        // Convert registry[] to registryIds
        String[] registryConfigIds = (String[]) serviceAnnotationAttributes.get("registry");
        if (registryConfigIds != null && registryConfigIds.length > 0) {
            // resolveStringArray mutates the array in place (placeholder resolution)
            resolveStringArray(registryConfigIds);
            builder.addPropertyValue("registryIds", StringUtils.join(registryConfigIds, ','));
        }
        // Convert protocol[] to protocolIds
        String[] protocolConfigIds = (String[]) serviceAnnotationAttributes.get("protocol");
        if (protocolConfigIds != null && protocolConfigIds.length > 0) {
            resolveStringArray(protocolConfigIds);
            builder.addPropertyValue("protocolIds", StringUtils.join(protocolConfigIds, ','));
        }
        // TODO Could we ignore these attributes: applicatin/monitor/module ? Use global config
        // monitor reference
        String monitorConfigId = (String) serviceAnnotationAttributes.get("monitor");
        if (StringUtils.hasText(monitorConfigId)) {
            addPropertyReference(builder, "monitor", monitorConfigId);
        }
        // module reference
        String moduleConfigId = (String) serviceAnnotationAttributes.get("module");
        if (StringUtils.hasText(moduleConfigId)) {
            addPropertyReference(builder, "module", moduleConfigId);
        }
        // executor reference (thread pool bean used for service invocation)
        String executorBeanName = (String) serviceAnnotationAttributes.get("executor");
        if (StringUtils.hasText(executorBeanName)) {
            addPropertyReference(builder, "executor", executorBeanName);
        }
        return builder.getBeanDefinition();
    }
private String[] resolveStringArray(String[] strs) {
if (strs == null) {
return null;
}
for (int i = 0; i < strs.length; i++) {
strs[i] = environment.resolvePlaceholders(strs[i]);
}
return strs;
}
private List convertMethodConfigs(Object methodsAnnotation) {
if (methodsAnnotation == null) {
return Collections.EMPTY_LIST;
}
return MethodConfig.constructMethodConfig((Method[]) methodsAnnotation);
}
    // Adds a bean reference property after resolving ${...} placeholders in the bean name.
    private void addPropertyReference(BeanDefinitionBuilder builder, String propertyName, String beanName) {
        String resolvedBeanName = environment.resolvePlaceholders(beanName);
        builder.addPropertyReference(propertyName, resolvedBeanName);
    }

    // Adds a plain String property after resolving ${...} placeholders in the value.
    private void addPropertyValue(BeanDefinitionBuilder builder, String propertyName, String value) {
        String resolvedBeanName = environment.resolvePlaceholders(value);
        builder.addPropertyValue(propertyName, resolvedBeanName);
    }
    /**
     * Get dubbo service annotation class at java-config @bean method
     *
     * @param beanDefinition the definition to inspect; only annotated definitions with
     *                       factory-method metadata can match
     * @return return service annotation attributes map if found, or return null if not found.
     */
    private Map<String, Object> getServiceAnnotationAttributes(BeanDefinition beanDefinition) {
        if (beanDefinition instanceof AnnotatedBeanDefinition) {
            AnnotatedBeanDefinition annotatedBeanDefinition = (AnnotatedBeanDefinition) beanDefinition;
            MethodMetadata factoryMethodMetadata = SpringCompatUtils.getFactoryMethodMetadata(annotatedBeanDefinition);
            if (factoryMethodMetadata != null) {
                // try all dubbo service annotation types
                for (Class<? extends Annotation> annotationType : serviceAnnotationTypes) {
                    if (factoryMethodMetadata.isAnnotated(annotationType.getName())) {
                        // Since Spring 5.2
                        // return
                        // factoryMethodMetadata.getAnnotations().get(annotationType).filterDefaultValues().asMap();
                        // Compatible with Spring 4.x
                        Map<String, Object> annotationAttributes =
                                factoryMethodMetadata.getAnnotationAttributes(annotationType.getName());
                        // Drop attributes still at their declared default value.
                        return filterDefaultValues(annotationType, annotationAttributes);
                    }
                }
            }
        }
        return null;
    }
/**
* process @DubboService at java-config @bean method
* <pre class="code">
* @Configuration
* public class ProviderConfig {
*
* @Bean
* @DubboService(group="demo", version="1.2.3")
* public DemoService demoService() {
* return new DemoServiceImpl();
* }
*
* }
* </pre>
* @param refServiceBeanName
* @param refServiceBeanDefinition
* @param attributes
*/
private void processAnnotatedBeanDefinition(
        String refServiceBeanName,
        AnnotatedBeanDefinition refServiceBeanDefinition,
        Map<String, Object> attributes) {
    // Work on a copy so the caller's attribute map is never mutated.
    Map<String, Object> annotationAttributes = new LinkedHashMap<>(attributes);
    // Derive the bean class from the @Bean factory method's return type.
    String factoryReturnType = SpringCompatUtils.getFactoryMethodReturnType(refServiceBeanDefinition);
    Class<?> beanClass = resolveClassName(factoryReturnType, classLoader);
    String serviceInterface = resolveInterfaceName(annotationAttributes, beanClass);
    // Derive the ServiceBean bean name and build its definition.
    String serviceBeanName = generateServiceBeanName(annotationAttributes, serviceInterface);
    AbstractBeanDefinition serviceBeanDefinition =
            buildServiceBeanDefinition(annotationAttributes, serviceInterface, refServiceBeanName);
    // Record the id property, then register the definition (duplicates are rejected there).
    serviceBeanDefinition.getPropertyValues().add(Constants.ID, serviceBeanName);
    registerServiceBeanDefinition(serviceBeanName, serviceBeanDefinition, serviceInterface);
}
/**
 * Registers the ServiceBean definition, failing fast when a different definition
 * is already registered under the same bean name.
 */
private void registerServiceBeanDefinition(
        String serviceBeanName, AbstractBeanDefinition serviceBeanDefinition, String serviceInterface) {
    if (registry.containsBeanDefinition(serviceBeanName)) {
        BeanDefinition existingDefinition = registry.getBeanDefinition(serviceBeanName);
        if (!existingDefinition.equals(serviceBeanDefinition)) {
            // Same name, different definition: this is an irrecoverable configuration error.
            String msg = "Found duplicated BeanDefinition of service interface [" + serviceInterface
                    + "] with bean name [" + serviceBeanName + "], existing definition [ " + existingDefinition
                    + "], new definition [" + serviceBeanDefinition + "]";
            logger.error(CONFIG_DUPLICATED_BEAN_DEFINITION, "", "", msg);
            throw new BeanDefinitionStoreException(
                    serviceBeanDefinition.getResourceDescription(), serviceBeanName, msg);
        }
        // An identical definition is already registered; nothing more to do.
        return;
    }
    registry.registerBeanDefinition(serviceBeanName, serviceBeanDefinition);
    if (logger.isInfoEnabled()) {
        logger.info("Register ServiceBean[" + serviceBeanName + "]: " + serviceBeanDefinition);
    }
}
@Override
public void setEnvironment(Environment environment) {
    // Injected by Spring via EnvironmentAware; used for placeholder resolution throughout this class.
    this.environment = environment;
}
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
    // Injected by Spring via ResourceLoaderAware.
    this.resourceLoader = resourceLoader;
}
@Override
public void setBeanClassLoader(ClassLoader classLoader) {
    // Injected by Spring via BeanClassLoaderAware; used to resolve service implementation classes.
    this.classLoader = classLoader;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
    // Look up the shared holder that tracks which packages/classes have already been scanned,
    // so repeated scans can skip known classes (see ScanExcludeFilter).
    this.servicePackagesHolder =
            applicationContext.getBean(ServicePackagesHolder.BEAN_NAME, ServicePackagesHolder.class);
}
/**
 * Excludes classes from component scanning when they have already been scanned,
 * counting how many were skipped.
 */
private class ScanExcludeFilter implements TypeFilter {

    // Number of classes excluded because they were already scanned.
    private int excludedCount;

    @Override
    public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
            throws IOException {
        String className = metadataReader.getClassMetadata().getClassName();
        if (!servicePackagesHolder.isClassScanned(className)) {
            return false;
        }
        excludedCount++;
        return true;
    }

    public int getExcludedCount() {
        return excludedCount;
    }
}
}
| 8,806 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ServiceBeanNameBuilder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.config.annotation.Reference;
import org.apache.dubbo.config.annotation.Service;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.ServiceBean;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.env.Environment;
import org.springframework.util.StringUtils;
import static org.apache.dubbo.config.spring.util.DubboAnnotationUtils.resolveInterfaceName;
import static org.springframework.core.annotation.AnnotationUtils.getAnnotationAttributes;
/**
* Dubbo {@link Service @Service} Bean Builder
*
* @see Service
* @see Reference
* @see ServiceBean
* @see ReferenceBean
* @since 2.6.5
*/
public class ServiceBeanNameBuilder {

    private static final String SEPARATOR = ":";

    // Required: fully qualified name of the service interface.
    private final String interfaceClassName;

    // Used to resolve ${...} placeholders in the generated bean name.
    private final Environment environment;

    // Optional: service version segment of the bean name.
    private String version;

    // Optional: service group segment of the bean name.
    private String group;

    private ServiceBeanNameBuilder(Class<?> interfaceClass, Environment environment) {
        this(interfaceClass.getName(), environment);
    }

    private ServiceBeanNameBuilder(String interfaceClassName, Environment environment) {
        this.interfaceClassName = interfaceClassName;
        this.environment = environment;
    }

    private ServiceBeanNameBuilder(
            AnnotationAttributes attributes, Class<?> defaultInterfaceClass, Environment environment) {
        this(resolveInterfaceName(attributes, defaultInterfaceClass), environment);
        // Seed the optional segments from the annotation's "group"/"version" attributes.
        this.group(AnnotationUtils.getAttribute(attributes, "group"));
        this.version(AnnotationUtils.getAttribute(attributes, "version"));
    }

    /**
     * Creates a builder from annotation attributes, falling back to the given interface class
     * when the attributes do not specify one.
     *
     * @param attributes annotation attributes of @Service/@Reference
     * @param defaultInterfaceClass interface class used when attributes omit it
     * @param environment Spring environment for placeholder resolution
     * @return a new builder
     * @since 2.7.3
     */
    public static ServiceBeanNameBuilder create(
            AnnotationAttributes attributes, Class<?> defaultInterfaceClass, Environment environment) {
        return new ServiceBeanNameBuilder(attributes, defaultInterfaceClass, environment);
    }

    public static ServiceBeanNameBuilder create(Class<?> interfaceClass, Environment environment) {
        return new ServiceBeanNameBuilder(interfaceClass, environment);
    }

    public static ServiceBeanNameBuilder create(String interfaceClass, Environment environment) {
        return new ServiceBeanNameBuilder(interfaceClass, environment);
    }

    public static ServiceBeanNameBuilder create(Service service, Class<?> interfaceClass, Environment environment) {
        return create(getAnnotationAttributes(service, false, false), interfaceClass, environment);
    }

    public static ServiceBeanNameBuilder create(Reference reference, Class<?> interfaceClass, Environment environment) {
        return create(getAnnotationAttributes(reference, false, false), interfaceClass, environment);
    }

    // Appends a ":" separator unconditionally, then the value only when it has text.
    // Absent optional segments therefore leave empty trailing segments in the name,
    // keeping a stable shape: "ServiceBean:<interface>:<version>:<group>".
    private static void append(StringBuilder builder, String value) {
        builder.append(SEPARATOR);
        if (StringUtils.hasText(value)) {
            builder.append(value);
        }
    }

    public ServiceBeanNameBuilder group(String group) {
        this.group = group;
        return this;
    }

    public ServiceBeanNameBuilder version(String version) {
        this.version = version;
        return this;
    }

    /**
     * Builds the ServiceBean name, e.g. {@code ServiceBean:com.foo.DemoService:1.0.0:demo}.
     * NOTE(review): no trailing ":" is removed here, despite what an older comment claimed;
     * names keep empty trailing segments when version/group are absent.
     */
    public String build() {
        StringBuilder beanNameBuilder = new StringBuilder("ServiceBean");
        // Required
        append(beanNameBuilder, interfaceClassName);
        // Optional
        append(beanNameBuilder, version);
        append(beanNameBuilder, group);
        String rawBeanName = beanNameBuilder.toString();
        // Resolve ${...} placeholders against the Spring Environment.
        return environment.resolvePlaceholders(rawBeanName);
    }
}
| 8,807 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ReferenceAnnotationBeanPostProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.common.compact.Dubbo2CompactUtils;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.ArrayUtils;
import org.apache.dubbo.common.utils.Assert;
import org.apache.dubbo.common.utils.ClassUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.dubbo.config.annotation.Reference;
import org.apache.dubbo.config.spring.Constants;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.context.event.DubboConfigInitEvent;
import org.apache.dubbo.config.spring.reference.ReferenceAttributes;
import org.apache.dubbo.config.spring.reference.ReferenceBeanManager;
import org.apache.dubbo.config.spring.reference.ReferenceBeanSupport;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import org.apache.dubbo.config.spring.util.SpringCompatUtils;
import org.apache.dubbo.rpc.service.GenericService;
import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.Member;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.springframework.beans.BeansException;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.PropertyValues;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
import org.springframework.beans.factory.annotation.InjectionMetadata;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.AbstractBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.type.MethodMetadata;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_DUBBO_BEAN_INITIALIZER;
import static org.apache.dubbo.common.utils.AnnotationUtils.filterDefaultValues;
import static org.springframework.util.StringUtils.hasText;
/**
* <p>
* Step 1:
* The purpose of implementing {@link BeanFactoryPostProcessor} is to scan the registration reference bean definition earlier,
* so that it can be shared with the xml bean configuration.
* </p>
*
* <p>
* Step 2:
* By implementing {@link org.springframework.beans.factory.config.InstantiationAwareBeanPostProcessor},
* inject the reference bean instance into the fields and setter methods which annotated with {@link DubboReference}.
* </p>
*
* @see DubboReference
* @see Reference
* @see com.alibaba.dubbo.config.annotation.Reference
* @since 2.5.7
*/
public class ReferenceAnnotationBeanPostProcessor extends AbstractAnnotationBeanPostProcessor
implements ApplicationContextAware, BeanFactoryPostProcessor {
/**
* The bean name of {@link ReferenceAnnotationBeanPostProcessor}
*/
public static final String BEAN_NAME = "referenceAnnotationBeanPostProcessor";
/**
* Cache size
*/
private static final int CACHE_SIZE = Integer.getInteger(BEAN_NAME + ".cache.size", 32);
private final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());
private final ConcurrentMap<InjectionMetadata.InjectedElement, String> injectedFieldReferenceBeanCache =
new ConcurrentHashMap<>(CACHE_SIZE);
private final ConcurrentMap<InjectionMetadata.InjectedElement, String> injectedMethodReferenceBeanCache =
new ConcurrentHashMap<>(CACHE_SIZE);
private ApplicationContext applicationContext;
private ReferenceBeanManager referenceBeanManager;
private BeanDefinitionRegistry beanDefinitionRegistry;
/**
* {@link com.alibaba.dubbo.config.annotation.Reference @com.alibaba.dubbo.config.annotation.Reference} has been supported since 2.7.3
* <p>
* {@link DubboReference @DubboReference} has been supported since 2.7.7
*/
public ReferenceAnnotationBeanPostProcessor() {
super(loadAnnotationTypes());
}
@SuppressWarnings("unchecked")
private static Class<? extends Annotation>[] loadAnnotationTypes() {
if (Dubbo2CompactUtils.isEnabled() && Dubbo2CompactUtils.isReferenceClassLoaded()) {
return (Class<? extends Annotation>[])
new Class<?>[] {DubboReference.class, Reference.class, Dubbo2CompactUtils.getReferenceClass()};
} else {
return (Class<? extends Annotation>[]) new Class<?>[] {DubboReference.class, Reference.class};
}
}
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
String[] beanNames = beanFactory.getBeanDefinitionNames();
for (String beanName : beanNames) {
Class<?> beanType;
if (beanFactory.isFactoryBean(beanName)) {
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanName);
if (isReferenceBean(beanDefinition)) {
continue;
}
if (isAnnotatedReferenceBean(beanDefinition)) {
// process @DubboReference at java-config @bean method
processReferenceAnnotatedBeanDefinition(beanName, (AnnotatedBeanDefinition) beanDefinition);
continue;
}
String beanClassName = beanDefinition.getBeanClassName();
beanType = ClassUtils.resolveClass(beanClassName, getClassLoader());
} else {
beanType = beanFactory.getType(beanName);
}
if (beanType != null) {
AnnotatedInjectionMetadata metadata = findInjectionMetadata(beanName, beanType, null);
try {
prepareInjection(metadata);
} catch (BeansException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Prepare dubbo reference injection element failed", e);
}
}
}
if (beanFactory instanceof AbstractBeanFactory) {
List<BeanPostProcessor> beanPostProcessors = ((AbstractBeanFactory) beanFactory).getBeanPostProcessors();
for (BeanPostProcessor beanPostProcessor : beanPostProcessors) {
if (beanPostProcessor == this) {
// This bean has been registered as BeanPostProcessor at
// org.apache.dubbo.config.spring.context.DubboInfraBeanRegisterPostProcessor.postProcessBeanFactory()
// so destroy this bean here, prevent register it as BeanPostProcessor again, avoid cause
// BeanPostProcessorChecker detection error
beanDefinitionRegistry.removeBeanDefinition(BEAN_NAME);
break;
}
}
}
try {
// this is an early event, it will be notified at
// org.springframework.context.support.AbstractApplicationContext.registerListeners()
applicationContext.publishEvent(new DubboConfigInitEvent(applicationContext));
} catch (Exception e) {
// if spring version is less than 4.2, it does not support early application event
logger.warn(
CONFIG_DUBBO_BEAN_INITIALIZER,
"",
"",
"publish early application event failed, please upgrade spring version to 4.2.x or later: " + e);
}
}
/**
* check whether is @DubboReference at java-config @bean method
*/
private boolean isAnnotatedReferenceBean(BeanDefinition beanDefinition) {
if (beanDefinition instanceof AnnotatedBeanDefinition) {
AnnotatedBeanDefinition annotatedBeanDefinition = (AnnotatedBeanDefinition) beanDefinition;
String beanClassName = SpringCompatUtils.getFactoryMethodReturnType(annotatedBeanDefinition);
if (beanClassName != null && ReferenceBean.class.getName().equals(beanClassName)) {
return true;
}
}
return false;
}
/**
* process @DubboReference at java-config @bean method
* <pre class="code">
* @Configuration
* public class ConsumerConfig {
*
* @Bean
* @DubboReference(group="demo", version="1.2.3")
* public ReferenceBean<DemoService> demoService() {
* return new ReferenceBean();
* }
*
* }
* </pre>
*
* @param beanName
* @param beanDefinition
*/
private void processReferenceAnnotatedBeanDefinition(String beanName, AnnotatedBeanDefinition beanDefinition) {
MethodMetadata factoryMethodMetadata = SpringCompatUtils.getFactoryMethodMetadata(beanDefinition);
// Extract beanClass from generic return type of java-config bean method: ReferenceBean<DemoService>
// see
// org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.getTypeForFactoryBeanFromMethod
Class beanClass = getBeanFactory().getType(beanName);
if (beanClass == Object.class) {
beanClass = SpringCompatUtils.getGenericTypeOfReturnType(factoryMethodMetadata);
}
if (beanClass == Object.class) {
// bean class is invalid, ignore it
return;
}
if (beanClass == null) {
String beanMethodSignature =
factoryMethodMetadata.getDeclaringClassName() + "#" + factoryMethodMetadata.getMethodName() + "()";
throw new BeanCreationException(
"The ReferenceBean is missing necessary generic type, which returned by the @Bean method of Java-config class. "
+ "The generic type of the returned ReferenceBean must be specified as the referenced interface type, "
+ "such as ReferenceBean<DemoService>. Please check bean method: "
+ beanMethodSignature);
}
// get dubbo reference annotation attributes
Map<String, Object> annotationAttributes = null;
// try all dubbo reference annotation types
for (Class<? extends Annotation> annotationType : getAnnotationTypes()) {
if (factoryMethodMetadata.isAnnotated(annotationType.getName())) {
// Since Spring 5.2
// return factoryMethodMetadata.getAnnotations().get(annotationType).filterDefaultValues().asMap();
// Compatible with Spring 4.x
annotationAttributes = factoryMethodMetadata.getAnnotationAttributes(annotationType.getName());
annotationAttributes = filterDefaultValues(annotationType, annotationAttributes);
break;
}
}
if (annotationAttributes != null) {
// @DubboReference on @Bean method
LinkedHashMap<String, Object> attributes = new LinkedHashMap<>(annotationAttributes);
// reset id attribute
attributes.put(ReferenceAttributes.ID, beanName);
// convert annotation props
ReferenceBeanSupport.convertReferenceProps(attributes, beanClass);
// get interface
String interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE);
// check beanClass and reference interface class
if (!StringUtils.isEquals(interfaceName, beanClass.getName()) && beanClass != GenericService.class) {
String beanMethodSignature = factoryMethodMetadata.getDeclaringClassName() + "#"
+ factoryMethodMetadata.getMethodName() + "()";
throw new BeanCreationException(
"The 'interfaceClass' or 'interfaceName' attribute value of @DubboReference annotation "
+ "is inconsistent with the generic type of the ReferenceBean returned by the bean method. "
+ "The interface class of @DubboReference is: "
+ interfaceName + ", but return ReferenceBean<" + beanClass.getName() + ">. "
+ "Please remove the 'interfaceClass' and 'interfaceName' attributes from @DubboReference annotation. "
+ "Please check bean method: "
+ beanMethodSignature);
}
Class interfaceClass = beanClass;
// set attribute instead of property values
beanDefinition.setAttribute(Constants.REFERENCE_PROPS, attributes);
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_CLASS, interfaceClass);
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_NAME, interfaceName);
} else {
// raw reference bean
// the ReferenceBean is not yet initialized
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_CLASS, beanClass);
if (beanClass != GenericService.class) {
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_NAME, beanClass.getName());
}
}
// set id
beanDefinition.getPropertyValues().add(ReferenceAttributes.ID, beanName);
}
@Override
public void postProcessMergedBeanDefinition(RootBeanDefinition beanDefinition, Class<?> beanType, String beanName) {
if (beanType != null) {
if (isReferenceBean(beanDefinition)) {
// mark property value as optional
List<PropertyValue> propertyValues =
beanDefinition.getPropertyValues().getPropertyValueList();
for (PropertyValue propertyValue : propertyValues) {
propertyValue.setOptional(true);
}
} else if (isAnnotatedReferenceBean(beanDefinition)) {
// extract beanClass from java-config bean method generic return type: ReferenceBean<DemoService>
// Class beanClass = getBeanFactory().getType(beanName);
} else {
AnnotatedInjectionMetadata metadata = findInjectionMetadata(beanName, beanType, null);
metadata.checkConfigMembers(beanDefinition);
try {
prepareInjection(metadata);
} catch (Exception e) {
throw new IllegalStateException("Prepare dubbo reference injection element failed", e);
}
}
}
}
/**
* Alternatives to the {@link #postProcessProperties(PropertyValues, Object, String)}, that removed as of Spring
* Framework 6.0.0, and in favor of {@link #postProcessProperties(PropertyValues, Object, String)}.
* <p>In order to be compatible with the lower version of Spring, it is still retained.
* @see #postProcessProperties
*/
public PropertyValues postProcessPropertyValues(
PropertyValues pvs, PropertyDescriptor[] pds, Object bean, String beanName) throws BeansException {
return postProcessProperties(pvs, bean, beanName);
}
/**
* Alternatives to the {@link #postProcessPropertyValues(PropertyValues, PropertyDescriptor[], Object, String)}.
* @see #postProcessPropertyValues
*/
@Override
public PropertyValues postProcessProperties(PropertyValues pvs, Object bean, String beanName)
throws BeansException {
try {
AnnotatedInjectionMetadata metadata = findInjectionMetadata(beanName, bean.getClass(), pvs);
prepareInjection(metadata);
metadata.inject(bean, beanName, pvs);
} catch (BeansException ex) {
throw ex;
} catch (Throwable ex) {
throw new BeanCreationException(
beanName, "Injection of @" + getAnnotationType().getSimpleName() + " dependencies is failed", ex);
}
return pvs;
}
private boolean isReferenceBean(BeanDefinition beanDefinition) {
return ReferenceBean.class.getName().equals(beanDefinition.getBeanClassName());
}
protected void prepareInjection(AnnotatedInjectionMetadata metadata) throws BeansException {
try {
// find and register bean definition for @DubboReference/@Reference
for (AnnotatedFieldElement fieldElement : metadata.getFieldElements()) {
if (fieldElement.injectedObject != null) {
continue;
}
Class<?> injectedType = fieldElement.field.getType();
AnnotationAttributes attributes = fieldElement.attributes;
String referenceBeanName = registerReferenceBean(
fieldElement.getPropertyName(), injectedType, attributes, fieldElement.field);
// associate fieldElement and reference bean
fieldElement.injectedObject = referenceBeanName;
injectedFieldReferenceBeanCache.put(fieldElement, referenceBeanName);
}
for (AnnotatedMethodElement methodElement : metadata.getMethodElements()) {
if (methodElement.injectedObject != null) {
continue;
}
Class<?> injectedType = methodElement.getInjectedType();
AnnotationAttributes attributes = methodElement.attributes;
String referenceBeanName = registerReferenceBean(
methodElement.getPropertyName(), injectedType, attributes, methodElement.method);
// associate methodElement and reference bean
methodElement.injectedObject = referenceBeanName;
injectedMethodReferenceBeanCache.put(methodElement, referenceBeanName);
}
} catch (ClassNotFoundException e) {
throw new BeanCreationException("prepare reference annotation failed", e);
}
}
public String registerReferenceBean(
String propertyName, Class<?> injectedType, Map<String, Object> attributes, Member member)
throws BeansException {
boolean renameable = true;
// referenceBeanName
String referenceBeanName = AnnotationUtils.getAttribute(attributes, ReferenceAttributes.ID);
if (hasText(referenceBeanName)) {
renameable = false;
} else {
referenceBeanName = propertyName;
}
String checkLocation = "Please check " + member.toString();
// convert annotation props
ReferenceBeanSupport.convertReferenceProps(attributes, injectedType);
// get interface
String interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE);
if (StringUtils.isBlank(interfaceName)) {
throw new BeanCreationException(
"Need to specify the 'interfaceName' or 'interfaceClass' attribute of '@DubboReference' if enable generic. "
+ checkLocation);
}
// check reference key
String referenceKey = ReferenceBeanSupport.generateReferenceKey(attributes, applicationContext);
// find reference bean name by reference key
List<String> registeredReferenceBeanNames = referenceBeanManager.getBeanNamesByKey(referenceKey);
if (registeredReferenceBeanNames.size() > 0) {
// found same name and reference key
if (registeredReferenceBeanNames.contains(referenceBeanName)) {
return referenceBeanName;
}
}
// check bean definition
boolean isContains;
if ((isContains = beanDefinitionRegistry.containsBeanDefinition(referenceBeanName))
|| beanDefinitionRegistry.isAlias(referenceBeanName)) {
String preReferenceBeanName = referenceBeanName;
if (!isContains) {
// Look in the alias for the origin bean name
String[] aliases = beanDefinitionRegistry.getAliases(referenceBeanName);
if (ArrayUtils.isNotEmpty(aliases)) {
for (String alias : aliases) {
if (beanDefinitionRegistry.containsBeanDefinition(alias)) {
preReferenceBeanName = alias;
break;
}
}
}
}
BeanDefinition prevBeanDefinition = beanDefinitionRegistry.getBeanDefinition(preReferenceBeanName);
String prevBeanType = prevBeanDefinition.getBeanClassName();
String prevBeanDesc = referenceBeanName + "[" + prevBeanType + "]";
String newBeanDesc = referenceBeanName + "[" + referenceKey + "]";
if (isReferenceBean(prevBeanDefinition)) {
// check reference key
String prevReferenceKey =
ReferenceBeanSupport.generateReferenceKey(prevBeanDefinition, applicationContext);
if (StringUtils.isEquals(prevReferenceKey, referenceKey)) {
// found matched dubbo reference bean, ignore register
return referenceBeanName;
}
// get interfaceName from attribute
Assert.notNull(prevBeanDefinition, "The interface class of ReferenceBean is not initialized");
prevBeanDesc = referenceBeanName + "[" + prevReferenceKey + "]";
}
// bean name from attribute 'id' or java-config bean, cannot be renamed
if (!renameable) {
throw new BeanCreationException(
"Already exists another bean definition with the same bean name [" + referenceBeanName + "], "
+ "but cannot rename the reference bean name (specify the id attribute or java-config bean), "
+ "please modify the name of one of the beans: "
+ "prev: "
+ prevBeanDesc + ", new: " + newBeanDesc + ". " + checkLocation);
}
// the prev bean type is different, rename the new reference bean
int index = 2;
String newReferenceBeanName = null;
while (newReferenceBeanName == null
|| beanDefinitionRegistry.containsBeanDefinition(newReferenceBeanName)
|| beanDefinitionRegistry.isAlias(newReferenceBeanName)) {
newReferenceBeanName = referenceBeanName + "#" + index;
index++;
// double check found same name and reference key
if (registeredReferenceBeanNames.contains(newReferenceBeanName)) {
return newReferenceBeanName;
}
}
newBeanDesc = newReferenceBeanName + "[" + referenceKey + "]";
logger.warn(
CONFIG_DUBBO_BEAN_INITIALIZER,
"",
"",
"Already exists another bean definition with the same bean name [" + referenceBeanName + "], "
+ "rename dubbo reference bean to ["
+ newReferenceBeanName + "]. "
+ "It is recommended to modify the name of one of the beans to avoid injection problems. "
+ "prev: "
+ prevBeanDesc + ", new: " + newBeanDesc + ". " + checkLocation);
referenceBeanName = newReferenceBeanName;
}
attributes.put(ReferenceAttributes.ID, referenceBeanName);
// If registered matched reference before, just register alias
if (registeredReferenceBeanNames.size() > 0) {
beanDefinitionRegistry.registerAlias(registeredReferenceBeanNames.get(0), referenceBeanName);
referenceBeanManager.registerReferenceKeyAndBeanName(referenceKey, referenceBeanName);
return referenceBeanName;
}
Class interfaceClass = injectedType;
// TODO Only register one reference bean for same (group, interface, version)
// Register the reference bean definition to the beanFactory
RootBeanDefinition beanDefinition = new RootBeanDefinition();
beanDefinition.setBeanClassName(ReferenceBean.class.getName());
beanDefinition.getPropertyValues().add(ReferenceAttributes.ID, referenceBeanName);
// set attribute instead of property values
beanDefinition.setAttribute(Constants.REFERENCE_PROPS, attributes);
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_CLASS, interfaceClass);
beanDefinition.setAttribute(ReferenceAttributes.INTERFACE_NAME, interfaceName);
// create decorated definition for reference bean, Avoid being instantiated when getting the beanType of
// ReferenceBean
// see org.springframework.beans.factory.support.AbstractBeanFactory#getTypeForFactoryBean()
GenericBeanDefinition targetDefinition = new GenericBeanDefinition();
targetDefinition.setBeanClass(interfaceClass);
beanDefinition.setDecoratedDefinition(
new BeanDefinitionHolder(targetDefinition, referenceBeanName + "_decorated"));
// signal object type since Spring 5.2
beanDefinition.setAttribute(Constants.OBJECT_TYPE_ATTRIBUTE, interfaceClass);
beanDefinitionRegistry.registerBeanDefinition(referenceBeanName, beanDefinition);
referenceBeanManager.registerReferenceKeyAndBeanName(referenceKey, referenceBeanName);
logger.info("Register dubbo reference bean: " + referenceBeanName + " = " + referenceKey + " at " + member);
return referenceBeanName;
}
@Override
protected Object doGetInjectedBean(
AnnotationAttributes attributes,
Object bean,
String beanName,
Class<?> injectedType,
AnnotatedInjectElement injectedElement)
throws Exception {
if (injectedElement.injectedObject == null) {
throw new IllegalStateException(
"The AnnotatedInjectElement of @DubboReference should be inited before injection");
}
return getBeanFactory().getBean((String) injectedElement.injectedObject);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
this.referenceBeanManager =
applicationContext.getBean(ReferenceBeanManager.BEAN_NAME, ReferenceBeanManager.class);
this.beanDefinitionRegistry = (BeanDefinitionRegistry) applicationContext.getAutowireCapableBeanFactory();
}
@Override
public void destroy() throws Exception {
super.destroy();
this.injectedFieldReferenceBeanCache.clear();
this.injectedMethodReferenceBeanCache.clear();
}
/**
 * Gets all beans of {@link ReferenceBean}.
 *
 * <p>This post-processor no longer tracks {@link ReferenceBean} instances itself,
 * so this method always returns an empty collection.</p>
 *
 * @return an immutable empty collection
 * @deprecated use {@link ReferenceBeanManager#getReferences()} instead
 */
@Deprecated
public Collection<ReferenceBean<?>> getReferenceBeans() {
    return Collections.emptyList();
}
/**
 * Get {@link ReferenceBean} {@link Map} in injected field.
 *
 * <p>Each cached field injection point is resolved to its {@link ReferenceBean}
 * via the {@code referenceBeanManager} at call time.</p>
 *
 * @return non-null unmodifiable {@link Map}
 * @since 2.5.11
 */
public Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> getInjectedFieldReferenceBeanMap() {
    Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> result = new HashMap<>();
    injectedFieldReferenceBeanCache.forEach(
            (injectionPoint, referenceBeanId) -> result.put(injectionPoint, referenceBeanManager.getById(referenceBeanId)));
    return Collections.unmodifiableMap(result);
}
/**
 * Get {@link ReferenceBean} {@link Map} in injected method.
 *
 * <p>Each cached method injection point is resolved to its {@link ReferenceBean}
 * via the {@code referenceBeanManager} at call time.</p>
 *
 * @return non-null unmodifiable {@link Map}
 * @since 2.5.11
 */
public Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> getInjectedMethodReferenceBeanMap() {
    Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> result = new HashMap<>();
    injectedMethodReferenceBeanCache.forEach(
            (injectionPoint, referenceBeanId) -> result.put(injectionPoint, referenceBeanManager.getById(referenceBeanId)));
    return Collections.unmodifiableMap(result);
}
}
| 8,808 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/AbstractAnnotationBeanPostProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.PropertyValues;
import org.springframework.beans.factory.BeanClassLoaderAware;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.InjectionMetadata;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.InstantiationAwareBeanPostProcessor;
import org.springframework.beans.factory.support.MergedBeanDefinitionPostProcessor;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.env.Environment;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_DUBBO_BEAN_INITIALIZER;
import static org.springframework.core.BridgeMethodResolver.findBridgedMethod;
import static org.springframework.core.BridgeMethodResolver.isVisibilityBridgeMethodPair;
/**
 * Abstract common {@link BeanPostProcessor} implementation for customized annotation that annotated injected-object.
 *
 * <p>Subclasses supply one or more annotation types (e.g. {@code @DubboReference}) and implement
 * {@link #doGetInjectedBean} to resolve the object that should be injected into annotated
 * fields and single-argument setter methods. Discovered injection metadata is cached per bean.</p>
 */
@SuppressWarnings("unchecked")
public abstract class AbstractAnnotationBeanPostProcessor
        implements InstantiationAwareBeanPostProcessor,
                MergedBeanDefinitionPostProcessor,
                BeanFactoryAware,
                BeanClassLoaderAware,
                EnvironmentAware,
                DisposableBean {

    // Initial capacity of the injection metadata cache. FIX: the system-property name
    // used to be the empty string "", which System.getProperty rejects, so the lookup
    // could never succeed and always fell back to 32. A named property keeps the same
    // default while actually allowing the capacity to be tuned.
    private static final int CACHE_SIZE = Integer.getInteger("dubbo.annotation.injection.cache.size", 32);

    private final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());

    /** The annotation types that mark an injection point (never empty). */
    private final Class<? extends Annotation>[] annotationTypes;

    /** Cache of per-bean injection metadata, keyed by bean name (or class name as fallback). */
    private final ConcurrentMap<String, AbstractAnnotationBeanPostProcessor.AnnotatedInjectionMetadata>
            injectionMetadataCache = new ConcurrentHashMap<>(CACHE_SIZE);

    private ConfigurableListableBeanFactory beanFactory;

    private Environment environment;

    private ClassLoader classLoader;

    // NOTE(review): currently unused in this class; retained for compatibility.
    private int order = Ordered.LOWEST_PRECEDENCE;

    /**
     * @param annotationTypes the multiple types of {@link Annotation annotations}
     */
    public AbstractAnnotationBeanPostProcessor(Class<? extends Annotation>... annotationTypes) {
        Assert.notEmpty(annotationTypes, "The argument of annotations' types must not empty");
        this.annotationTypes = annotationTypes;
    }

    /** Flattens several collections into a single mutable list (order preserved). */
    private static <T> Collection<T> combine(Collection<? extends T>... elements) {
        List<T> allElements = new ArrayList<T>();
        for (Collection<? extends T> e : elements) {
            allElements.addAll(e);
        }
        return allElements;
    }

    /**
     * Annotation type
     *
     * @return non-null
     * @deprecated 2.7.3, uses {@link #getAnnotationTypes()}
     */
    @Deprecated
    public final Class<? extends Annotation> getAnnotationType() {
        return annotationTypes[0];
    }

    protected final Class<? extends Annotation>[] getAnnotationTypes() {
        return annotationTypes;
    }

    @Override
    public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
        Assert.isInstanceOf(
                ConfigurableListableBeanFactory.class,
                beanFactory,
                "AnnotationInjectedBeanPostProcessor requires a ConfigurableListableBeanFactory");
        this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
    }

    /**
     * Finds {@link InjectionMetadata.InjectedElement} Metadata from annotated fields
     *
     * @param beanClass The {@link Class} of Bean
     * @return non-null {@link List}
     */
    private List<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> findFieldAnnotationMetadata(
            final Class<?> beanClass) {
        final List<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> elements = new LinkedList<>();
        ReflectionUtils.doWithFields(beanClass, field -> {
            for (Class<? extends Annotation> annotationType : getAnnotationTypes()) {
                AnnotationAttributes attributes =
                        AnnotationUtils.getAnnotationAttributes(field, annotationType, getEnvironment(), true, true);
                if (attributes != null) {
                    // Static fields cannot hold per-instance proxies; warn and skip.
                    if (Modifier.isStatic(field.getModifiers())) {
                        if (logger.isWarnEnabled()) {
                            logger.warn(
                                    CONFIG_DUBBO_BEAN_INITIALIZER,
                                    "",
                                    "",
                                    "@" + annotationType.getName() + " is not supported on static fields: " + field);
                        }
                        return;
                    }
                    elements.add(new AnnotatedFieldElement(field, attributes));
                }
            }
        });
        return elements;
    }

    /**
     * Finds {@link InjectionMetadata.InjectedElement} Metadata from annotated methods
     *
     * @param beanClass The {@link Class} of Bean
     * @return non-null {@link List}
     */
    private List<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> findAnnotatedMethodMetadata(
            final Class<?> beanClass) {
        final List<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> elements = new LinkedList<>();
        ReflectionUtils.doWithMethods(beanClass, method -> {
            // Resolve bridge methods so annotations declared on the original method are seen.
            Method bridgedMethod = findBridgedMethod(method);
            if (!isVisibilityBridgeMethodPair(method, bridgedMethod)) {
                return;
            }
            if (method.getAnnotation(Bean.class) != null) {
                // DO NOT inject to Java-config class's @Bean method
                return;
            }
            for (Class<? extends Annotation> annotationType : getAnnotationTypes()) {
                AnnotationAttributes attributes = AnnotationUtils.getAnnotationAttributes(
                        bridgedMethod, annotationType, getEnvironment(), true, true);
                // Only inject through the most specific override of the method.
                if (attributes != null && method.equals(ClassUtils.getMostSpecificMethod(method, beanClass))) {
                    if (Modifier.isStatic(method.getModifiers())) {
                        throw new IllegalStateException("When using @" + annotationType.getName()
                                + " to inject interface proxy, it is not supported on static methods: " + method);
                    }
                    if (method.getParameterTypes().length != 1) {
                        throw new IllegalStateException("When using @" + annotationType.getName()
                                + " to inject interface proxy, the method must have only one parameter: " + method);
                    }
                    PropertyDescriptor pd = BeanUtils.findPropertyForMethod(bridgedMethod, beanClass);
                    elements.add(new AnnotatedMethodElement(method, pd, attributes));
                }
            }
        });
        return elements;
    }

    /** Builds combined field + method injection metadata for the given bean class. */
    private AbstractAnnotationBeanPostProcessor.AnnotatedInjectionMetadata buildAnnotatedMetadata(
            final Class<?> beanClass) {
        Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> fieldElements =
                findFieldAnnotationMetadata(beanClass);
        Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> methodElements =
                findAnnotatedMethodMetadata(beanClass);
        return new AnnotatedInjectionMetadata(beanClass, fieldElements, methodElements);
    }

    /**
     * Returns (building and caching if necessary) the injection metadata for the given bean.
     * Uses double-checked locking over the concurrent cache, mirroring Spring's own
     * AutowiredAnnotationBeanPostProcessor.
     */
    protected AnnotatedInjectionMetadata findInjectionMetadata(String beanName, Class<?> clazz, PropertyValues pvs) {
        // Fall back to class name as cache key, for backwards compatibility with custom callers.
        String cacheKey = (StringUtils.hasLength(beanName) ? beanName : clazz.getName());
        // Quick check on the concurrent map first, with minimal locking.
        AbstractAnnotationBeanPostProcessor.AnnotatedInjectionMetadata metadata =
                this.injectionMetadataCache.get(cacheKey);
        if (needsRefreshInjectionMetadata(metadata, clazz)) {
            synchronized (this.injectionMetadataCache) {
                metadata = this.injectionMetadataCache.get(cacheKey);
                if (needsRefreshInjectionMetadata(metadata, clazz)) {
                    if (metadata != null) {
                        metadata.clear(pvs);
                    }
                    try {
                        metadata = buildAnnotatedMetadata(clazz);
                        this.injectionMetadataCache.put(cacheKey, metadata);
                    } catch (NoClassDefFoundError err) {
                        throw new IllegalStateException(
                                "Failed to introspect object class [" + clazz.getName()
                                        + "] for annotation metadata: could not find class that it depends on",
                                err);
                    }
                }
            }
        }
        return metadata;
    }

    // Use custom check method to compatible with Spring 4.x
    private boolean needsRefreshInjectionMetadata(AnnotatedInjectionMetadata metadata, Class<?> clazz) {
        return (metadata == null || metadata.needsRefresh(clazz));
    }

    @Override
    public void destroy() throws Exception {
        injectionMetadataCache.clear();
        if (logger.isInfoEnabled()) {
            logger.info(getClass() + " was destroying!");
        }
    }

    @Override
    public void setBeanClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    @Override
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }

    protected Environment getEnvironment() {
        return environment;
    }

    protected ClassLoader getClassLoader() {
        return classLoader;
    }

    protected ConfigurableListableBeanFactory getBeanFactory() {
        return beanFactory;
    }

    /**
     * Get injected-object from specified {@link AnnotationAttributes annotation attributes} and Bean Class
     *
     * @param attributes {@link AnnotationAttributes the annotation attributes}
     * @param bean Current bean that will be injected
     * @param beanName Current bean name that will be injected
     * @param injectedType the type of injected-object
     * @param injectedElement {@link AnnotatedInjectElement}
     * @return An injected object
     * @throws Exception If getting is failed
     */
    protected Object getInjectedObject(
            AnnotationAttributes attributes,
            Object bean,
            String beanName,
            Class<?> injectedType,
            AnnotatedInjectElement injectedElement)
            throws Exception {
        return doGetInjectedBean(attributes, bean, beanName, injectedType, injectedElement);
    }

    /**
     * Prepare injection data after found injection elements
     *
     * @param metadata the discovered injection metadata; default implementation is a no-op
     * @throws Exception if preparation fails
     */
    protected void prepareInjection(AnnotatedInjectionMetadata metadata) throws Exception {}

    /**
     * Subclass must implement this method to get injected-object. The context objects could help this method if
     * necessary :
     * <ul>
     * <li>{@link #getBeanFactory() BeanFactory}</li>
     * <li>{@link #getClassLoader() ClassLoader}</li>
     * <li>{@link #getEnvironment() Environment}</li>
     * </ul>
     *
     * @param attributes {@link AnnotationAttributes the annotation attributes}
     * @param bean Current bean that will be injected
     * @param beanName Current bean name that will be injected
     * @param injectedType the type of injected-object
     * @param injectedElement {@link AnnotatedInjectElement}
     * @return The injected object
     * @throws Exception If resolving an injected object is failed.
     */
    protected abstract Object doGetInjectedBean(
            AnnotationAttributes attributes,
            Object bean,
            String beanName,
            Class<?> injectedType,
            AnnotatedInjectElement injectedElement)
            throws Exception;

    @Override
    public Object postProcessBeforeInstantiation(Class<?> beanClass, String beanName) throws BeansException {
        return null;
    }

    @Override
    public boolean postProcessAfterInstantiation(Object bean, String beanName) throws BeansException {
        return true;
    }

    @Override
    public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
        return bean;
    }

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
        return bean;
    }

    /**
     * {@link Annotation Annotated} {@link InjectionMetadata} implementation
     */
    protected static class AnnotatedInjectionMetadata extends InjectionMetadata {

        private Class<?> targetClass;

        private final Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> fieldElements;

        private final Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> methodElements;

        public AnnotatedInjectionMetadata(
                Class<?> targetClass,
                Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> fieldElements,
                Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> methodElements) {
            super(targetClass, combine(fieldElements, methodElements));
            this.targetClass = targetClass;
            this.fieldElements = fieldElements;
            this.methodElements = methodElements;
        }

        public Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> getFieldElements() {
            return fieldElements;
        }

        public Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> getMethodElements() {
            return methodElements;
        }

        // @Override // since Spring 5.2.4
        protected boolean needsRefresh(Class<?> clazz) {
            if (this.targetClass == clazz) {
                return false;
            }
            // IGNORE Spring CGLIB enhanced class
            if (targetClass.isAssignableFrom(clazz) && clazz.getName().contains("$$EnhancerBySpringCGLIB$$")) {
                return false;
            }
            return true;
        }
    }

    /**
     * {@link Annotation Annotated} {@link Method} {@link InjectionMetadata.InjectedElement}
     */
    protected class AnnotatedInjectElement extends InjectionMetadata.InjectedElement {

        protected final AnnotationAttributes attributes;

        /** Resolved by subclasses (e.g. a reference bean name) before inject() runs. */
        protected volatile Object injectedObject;

        private Class<?> injectedType;

        protected AnnotatedInjectElement(Member member, PropertyDescriptor pd, AnnotationAttributes attributes) {
            super(member, pd);
            this.attributes = attributes;
        }

        @Override
        protected void inject(Object bean, String beanName, PropertyValues pvs) throws Throwable {
            Object injectedObject = getInjectedObject(attributes, bean, beanName, getInjectedType(), this);
            if (member instanceof Field) {
                Field field = (Field) member;
                ReflectionUtils.makeAccessible(field);
                field.set(bean, injectedObject);
            } else if (member instanceof Method) {
                Method method = (Method) member;
                ReflectionUtils.makeAccessible(method);
                method.invoke(bean, injectedObject);
            }
        }

        /**
         * Determines the type to inject: the field type, the property type from the
         * PropertyDescriptor (note: not cached in that branch), or the single setter
         * parameter type.
         */
        public Class<?> getInjectedType() throws ClassNotFoundException {
            if (injectedType == null) {
                if (this.isField) {
                    injectedType = ((Field) this.member).getType();
                } else if (this.pd != null) {
                    return this.pd.getPropertyType();
                } else {
                    Method method = (Method) this.member;
                    if (method.getParameterTypes().length > 0) {
                        injectedType = method.getParameterTypes()[0];
                    } else {
                        throw new IllegalStateException("get injected type failed");
                    }
                }
            }
            return injectedType;
        }

        public String getPropertyName() {
            if (member instanceof Field) {
                Field field = (Field) member;
                return field.getName();
            } else if (this.pd != null) {
                // If it is method element, using propertyName of PropertyDescriptor
                return pd.getName();
            } else {
                Method method = (Method) this.member;
                return method.getName();
            }
        }
    }

    protected class AnnotatedMethodElement extends AnnotatedInjectElement {

        protected final Method method;

        protected AnnotatedMethodElement(Method method, PropertyDescriptor pd, AnnotationAttributes attributes) {
            super(method, pd, attributes);
            this.method = method;
        }
    }

    public class AnnotatedFieldElement extends AnnotatedInjectElement {

        protected final Field field;

        protected AnnotatedFieldElement(Field field, AnnotationAttributes attributes) {
            super(field, null, attributes);
            this.field = field;
        }
    }
}
| 8,809 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ServicePackagesHolder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.beans.factory.annotation;
import java.util.HashSet;
import java.util.Set;
/**
 * A temp holder for scanned packages of service.
 */
public class ServicePackagesHolder {

    public static final String BEAN_NAME = "dubboServicePackagesHolder";

    /** Scanned package names, each normalized with a trailing '.' so prefix checks are unambiguous. */
    private final Set<String> scannedPackages = new HashSet<>();

    /** Fully qualified names of classes that have already been processed. */
    private final Set<String> scannedClasses = new HashSet<>();

    public void addScannedPackage(String apackage) {
        String normalized = normalizePackage(apackage);
        synchronized (scannedPackages) {
            scannedPackages.add(normalized);
        }
    }

    public boolean isPackageScanned(String packageName) {
        String candidate = normalizePackage(packageName);
        synchronized (scannedPackages) {
            // Exact hit first, then check whether any recorded package covers it as a parent.
            if (scannedPackages.contains(candidate)) {
                return true;
            }
            for (String scanned : scannedPackages) {
                if (isSubPackage(candidate, scanned)) {
                    return true;
                }
            }
        }
        return false;
    }

    public void addScannedClass(String className) {
        synchronized (scannedClasses) {
            scannedClasses.add(className);
        }
    }

    public boolean isClassScanned(String className) {
        synchronized (scannedClasses) {
            return scannedClasses.contains(className);
        }
    }

    /**
     * Whether test package is sub package of parent package.
     *
     * @param testPkg normalized package to test
     * @param parent normalized potential parent package
     * @return true when testPkg lies under parent
     */
    private boolean isSubPackage(String testPkg, String parent) {
        // Both arguments are normalized with a trailing '.', so a prefix match is exact.
        return testPkg.startsWith(parent);
    }

    /** Appends a trailing '.' if absent, so "com.foo" cannot accidentally match "com.foobar". */
    private String normalizePackage(String apackage) {
        return apackage.endsWith(".") ? apackage : apackage + ".";
    }
}
| 8,810 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceBeanSupport.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.reference;
import org.apache.dubbo.common.utils.Assert;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.annotation.ProvidedBy;
import org.apache.dubbo.config.spring.Constants;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import org.apache.dubbo.config.spring.util.DubboAnnotationUtils;
import org.apache.dubbo.rpc.service.GenericService;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.config.TypedStringValue;
import org.springframework.beans.factory.support.AbstractBeanFactory;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.context.ApplicationContext;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.util.ObjectUtils;
import static org.apache.dubbo.common.utils.StringUtils.join;
/**
 * Helper for converting and normalizing reference attributes, and for generating the
 * canonical "reference key" used to deduplicate equivalent {@link ReferenceBean}s.
 */
public class ReferenceBeanSupport {

    // Attributes that identify the reference (they appear in the key prefix) and thus
    // are excluded from the sorted attribute list appended to the key.
    private static final List<String> IGNORED_ATTRS = Arrays.asList(
            ReferenceAttributes.ID,
            ReferenceAttributes.GROUP,
            ReferenceAttributes.VERSION,
            ReferenceAttributes.INTERFACE,
            ReferenceAttributes.INTERFACE_NAME,
            ReferenceAttributes.INTERFACE_CLASS);

    /**
     * Normalizes raw reference attributes in place: resolves the interface name from the
     * various interface attributes, merges {@link ProvidedBy} services, resets the
     * {@code generic} flag, and converts annotation-style attributes to config properties.
     *
     * @param attributes mutable attribute map to normalize
     * @param defaultInterfaceClass the injection point's declared type (may be null);
     *        {@code GenericService} means a generic reference
     */
    public static void convertReferenceProps(Map<String, Object> attributes, Class defaultInterfaceClass) {
        // interface class: prefer explicit 'interface', then 'interfaceName', then 'interfaceClass'
        String interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE);
        if (interfaceName == null) {
            interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE_NAME);
        }
        if (interfaceName == null) {
            Object interfaceClassValue = attributes.get(ReferenceAttributes.INTERFACE_CLASS);
            if (interfaceClassValue instanceof Class) {
                interfaceName = ((Class<?>) interfaceClassValue).getName();
            } else if (interfaceClassValue instanceof String) {
                // "void" is the annotation default placeholder, not a real interface.
                if (interfaceClassValue.equals("void")) {
                    attributes.remove(ReferenceAttributes.INTERFACE_CLASS);
                } else {
                    interfaceName = (String) interfaceClassValue;
                }
            }
        }
        // FIX: guard against null defaultInterfaceClass (it is null-checked below, so null is
        // possible); without the guard this line threw NPE before the meaningful Assert message.
        if (interfaceName == null && defaultInterfaceClass != null && defaultInterfaceClass != GenericService.class) {
            interfaceName = defaultInterfaceClass.getName();
        }
        Assert.notEmptyString(interfaceName, "The interface class or name of reference was not found");

        // Merge services declared by @ProvidedBy on the interface with any explicitly
        // configured provided-by services (annotation values first).
        ProvidedBy providedBy = null;
        if (defaultInterfaceClass != null) {
            providedBy = (ProvidedBy) defaultInterfaceClass.getAnnotation(ProvidedBy.class);
        }
        if (providedBy != null && providedBy.name() != null && providedBy.name().length > 0) {
            int annotationServiceCount = providedBy.name().length;
            Object providedByServices = attributes.get(ReferenceAttributes.PROVIDED_BY);
            int attributeServiceCount = 0;
            String[] attributeServices = null;
            if (providedByServices != null) {
                attributeServiceCount = ((String[]) providedByServices).length;
                attributeServices = (String[]) providedByServices;
            }
            String[] mergedServices = new String[annotationServiceCount + attributeServiceCount];
            System.arraycopy(providedBy.name(), 0, mergedServices, 0, annotationServiceCount);
            if (attributeServiceCount > 0) {
                System.arraycopy(
                        attributeServices,
                        0,
                        mergedServices,
                        annotationServiceCount,
                        attributeServiceCount);
            }
            attributes.put(ReferenceAttributes.PROVIDED_BY, mergedServices);
        }

        attributes.put(ReferenceAttributes.INTERFACE, interfaceName);
        attributes.remove(ReferenceAttributes.INTERFACE_NAME);
        attributes.remove(ReferenceAttributes.INTERFACE_CLASS);

        // reset generic value
        String generic = String.valueOf(defaultInterfaceClass == GenericService.class);
        // FIX: was containsValue(...), which tested the key name against the map's VALUES and
        // so almost always yielded "false"; containsKey is the intended lookup.
        String oldGeneric = attributes.containsKey(ReferenceAttributes.GENERIC)
                ? String.valueOf(attributes.get(ReferenceAttributes.GENERIC))
                : "false";
        if (!StringUtils.isEquals(oldGeneric, generic)) {
            attributes.put(ReferenceAttributes.GENERIC, generic);
        }

        // Specially convert @DubboReference attribute name/value to ReferenceConfig property
        // String[] registry => String registryIds
        String[] registryIds = (String[]) attributes.get(ReferenceAttributes.REGISTRY);
        if (registryIds != null) {
            String value = join(registryIds, ",");
            attributes.remove(ReferenceAttributes.REGISTRY);
            attributes.put(ReferenceAttributes.REGISTRY_IDS, value);
        }
    }

    /**
     * Builds the canonical reference key:
     * {@code ReferenceBean:group/interface:version(attr1=v1,attr2=v2,...)} with attributes
     * sorted by name; placeholders are resolved against the Spring environment when available.
     */
    public static String generateReferenceKey(Map<String, Object> attributes, ApplicationContext applicationContext) {
        String interfaceClass = (String) attributes.get(ReferenceAttributes.INTERFACE);
        Assert.notEmptyString(interfaceClass, "No interface class or name found from attributes");
        String group = (String) attributes.get(ReferenceAttributes.GROUP);
        String version = (String) attributes.get(ReferenceAttributes.VERSION);

        // ReferenceBean:group/interface:version
        StringBuilder beanNameBuilder = new StringBuilder("ReferenceBean:");
        if (StringUtils.isNotEmpty(group)) {
            beanNameBuilder.append(group).append('/');
        }
        beanNameBuilder.append(interfaceClass);
        if (StringUtils.isNotEmpty(version)) {
            beanNameBuilder.append(':').append(version);
        }

        // append attributes
        beanNameBuilder.append('(');
        // sort attributes keys for a stable, order-independent key
        List<String> sortedAttrKeys = new ArrayList<>(attributes.keySet());
        Collections.sort(sortedAttrKeys);
        for (String key : sortedAttrKeys) {
            if (IGNORED_ATTRS.contains(key)) {
                continue;
            }
            Object value = attributes.get(key);
            value = convertToString(key, value);
            beanNameBuilder.append(key).append('=').append(value).append(',');
        }
        // replace the latest "," to be ")"
        if (beanNameBuilder.charAt(beanNameBuilder.length() - 1) == ',') {
            beanNameBuilder.setCharAt(beanNameBuilder.length() - 1, ')');
        } else {
            beanNameBuilder.append(')');
        }

        String referenceKey = beanNameBuilder.toString();
        if (applicationContext != null) {
            // resolve placeholder with Spring Environment
            referenceKey = applicationContext.getEnvironment().resolvePlaceholders(referenceKey);
            // resolve placeholder with Spring BeanFactory ( using
            // PropertyResourceConfigurer/PropertySourcesPlaceholderConfigurer )
            referenceKey = ((AbstractBeanFactory) applicationContext.getAutowireCapableBeanFactory())
                    .resolveEmbeddedValue(referenceKey);
        }
        return referenceKey;
    }

    /**
     * Converts an attribute value to a stable string form: annotations become sorted
     * attribute maps, arrays are sorted and stringified, maps become sorted TreeMaps.
     */
    private static String convertToString(String key, Object obj) {
        if (obj == null) {
            return null;
        }
        if (ReferenceAttributes.PARAMETERS.equals(key) && obj instanceof String[]) {
            // convert parameters array pairs to map
            obj = DubboAnnotationUtils.convertParameters((String[]) obj);
        }
        // to string
        if (obj instanceof Annotation) {
            AnnotationAttributes attributes = AnnotationUtils.getAnnotationAttributes((Annotation) obj, true);
            for (Map.Entry<String, Object> entry : attributes.entrySet()) {
                entry.setValue(convertToString(entry.getKey(), entry.getValue()));
            }
            return String.valueOf(attributes);
        } else if (obj.getClass().isArray()) {
            Object[] array = ObjectUtils.toObjectArray(obj);
            String[] newArray = new String[array.length];
            for (int i = 0; i < array.length; i++) {
                newArray[i] = convertToString(null, array[i]);
            }
            // sort so that element order does not change the key
            Arrays.sort(newArray);
            return Arrays.toString(newArray);
        } else if (obj instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) obj;
            // TreeMap gives deterministic key order
            TreeMap<String, Object> newMap = new TreeMap<>();
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                newMap.put(entry.getKey(), convertToString(entry.getKey(), entry.getValue()));
            }
            return String.valueOf(newMap);
        } else {
            return String.valueOf(obj);
        }
    }

    /**
     * Convert to raw props, without parsing nested config objects
     */
    public static Map<String, Object> convertPropertyValues(MutablePropertyValues propertyValues) {
        Map<String, Object> referenceProps = new LinkedHashMap<>();
        for (PropertyValue propertyValue : propertyValues.getPropertyValueList()) {
            String propertyName = propertyValue.getName();
            Object value = propertyValue.getValue();
            if (ReferenceAttributes.METHODS.equals(propertyName)
                    || ReferenceAttributes.ARGUMENTS.equals(propertyName)) {
                // nested method/argument configs: convert each nested bean definition recursively
                ManagedList managedList = (ManagedList) value;
                List<Map<String, Object>> elementList = new ArrayList<>();
                for (Object el : managedList) {
                    Map<String, Object> element = convertPropertyValues(
                            ((BeanDefinitionHolder) el).getBeanDefinition().getPropertyValues());
                    element.remove(ReferenceAttributes.ID);
                    elementList.add(element);
                }
                value = elementList.toArray(new Object[0]);
            } else if (ReferenceAttributes.PARAMETERS.equals(propertyName)) {
                value = createParameterMap((ManagedMap) value);
            }
            // convert ref
            if (value instanceof RuntimeBeanReference) {
                RuntimeBeanReference beanReference = (RuntimeBeanReference) value;
                value = beanReference.getBeanName();
            }
            if (value == null || (value instanceof String && StringUtils.isBlank((String) value))) {
                // ignore null or blank string
                continue;
            }
            referenceProps.put(propertyName, value);
        }
        return referenceProps;
    }

    /** Unwraps a Spring ManagedMap of TypedStringValue entries into a plain String map. */
    private static Map<String, String> createParameterMap(ManagedMap managedMap) {
        Map<String, String> map = new LinkedHashMap<>();
        Set<Map.Entry<String, TypedStringValue>> entrySet = managedMap.entrySet();
        for (Map.Entry<String, TypedStringValue> entry : entrySet) {
            map.put(entry.getKey(), entry.getValue().getValue());
        }
        return map;
    }

    public static String generateReferenceKey(ReferenceBean referenceBean, ApplicationContext applicationContext) {
        return generateReferenceKey(getReferenceAttributes(referenceBean), applicationContext);
    }

    public static String generateReferenceKey(BeanDefinition beanDefinition, ApplicationContext applicationContext) {
        return generateReferenceKey(getReferenceAttributes(beanDefinition), applicationContext);
    }

    /**
     * Extracts reference attributes from a ReferenceBean: prefers its pre-built props,
     * otherwise converts its property values.
     *
     * @throws RuntimeException if neither props nor property values are available
     */
    public static Map<String, Object> getReferenceAttributes(ReferenceBean referenceBean) {
        Map<String, Object> referenceProps = referenceBean.getReferenceProps();
        if (referenceProps == null) {
            MutablePropertyValues propertyValues = referenceBean.getPropertyValues();
            if (propertyValues == null) {
                throw new RuntimeException(
                        "ReferenceBean is invalid, 'referenceProps' and 'propertyValues' cannot both be empty.");
            }
            referenceProps = convertPropertyValues(propertyValues);
        }
        return referenceProps;
    }

    /**
     * Extracts reference attributes from a bean definition: prefers the cached
     * REFERENCE_PROPS attribute, otherwise converts the definition's property values.
     */
    public static Map<String, Object> getReferenceAttributes(BeanDefinition beanDefinition) {
        Map<String, Object> referenceProps = null;
        if (beanDefinition.hasAttribute(Constants.REFERENCE_PROPS)) {
            referenceProps = (Map<String, Object>) beanDefinition.getAttribute(Constants.REFERENCE_PROPS);
        } else {
            referenceProps = convertPropertyValues(beanDefinition.getPropertyValues());
        }
        return referenceProps;
    }
}
| 8,811 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceBeanBuilder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.reference;
import org.apache.dubbo.config.ConsumerConfig;
import org.apache.dubbo.config.MethodConfig;
import org.apache.dubbo.config.MonitorConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.dubbo.config.spring.ReferenceBean;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* <p>
* Builder for ReferenceBean, used to return ReferenceBean instance in Java-config @Bean method,
* equivalent to {@link DubboReference} annotation.
* </p>
*
* <p>
* <b>It is recommended to use {@link DubboReference} on the @Bean method in the Java-config class.</b>
* </p>
*
* Step 1: Register ReferenceBean in Java-config class:
* <pre class="code">
* @Configuration
* public class ReferenceConfiguration {
*
* @Bean
* public ReferenceBean<HelloService> helloService() {
* return new ReferenceBeanBuilder()
* .setGroup("demo")
* .build();
* }
*
* @Bean
* public ReferenceBean<HelloService> helloService2() {
* return new ReferenceBean();
* }
*
* @Bean
* public ReferenceBean<GenericService> genericHelloService() {
* return new ReferenceBeanBuilder()
* .setGroup("demo")
* .setInterface(HelloService.class)
* .build();
* }
*
* }
* </pre>
*
* Step 2: Inject ReferenceBean by @Autowired
* <pre class="code">
* public class FooController {
* @Autowired
* private HelloService helloService;
*
* @Autowired
* private GenericService genericHelloService;
* }
* </pre>
*
* @see org.apache.dubbo.config.annotation.DubboReference
* @see org.apache.dubbo.config.spring.ReferenceBean
*/
public class ReferenceBeanBuilder {

    /** Collected reference attributes, keyed by {@link ReferenceAttributes} constant names. */
    private final Map<String, Object> attributes = new HashMap<>();

    /** Records a single attribute and returns this builder to allow fluent chaining. */
    private ReferenceBeanBuilder store(String name, Object value) {
        attributes.put(name, value);
        return this;
    }

    /** Creates the {@link ReferenceBean} backed by the attributes collected so far. */
    public <T> ReferenceBean<T> build() {
        return new ReferenceBean(attributes);
    }

    public ReferenceBeanBuilder setServices(String services) {
        return store(ReferenceAttributes.SERVICES, services);
    }

    public ReferenceBeanBuilder setInterface(String interfaceName) {
        return store(ReferenceAttributes.INTERFACE_NAME, interfaceName);
    }

    public ReferenceBeanBuilder setInterface(Class interfaceClass) {
        return store(ReferenceAttributes.INTERFACE_CLASS, interfaceClass);
    }

    public ReferenceBeanBuilder setClient(String client) {
        return store(ReferenceAttributes.CLIENT, client);
    }

    public ReferenceBeanBuilder setUrl(String url) {
        return store(ReferenceAttributes.URL, url);
    }

    public ReferenceBeanBuilder setConsumer(ConsumerConfig consumer) {
        return store(ReferenceAttributes.CONSUMER, consumer);
    }

    public ReferenceBeanBuilder setConsumer(String consumer) {
        return store(ReferenceAttributes.CONSUMER, consumer);
    }

    public ReferenceBeanBuilder setProtocol(String protocol) {
        return store(ReferenceAttributes.PROTOCOL, protocol);
    }

    public ReferenceBeanBuilder setCheck(Boolean check) {
        return store(ReferenceAttributes.CHECK, check);
    }

    public ReferenceBeanBuilder setInit(Boolean init) {
        return store(ReferenceAttributes.INIT, init);
    }

    // @Deprecated
    public ReferenceBeanBuilder setGeneric(Boolean generic) {
        return store(ReferenceAttributes.GENERIC, generic);
    }

    /**
     * @param injvm
     * @deprecated instead, use the parameter <b>scope</b> to judge if it's in jvm, scope=local
     */
    @Deprecated
    public ReferenceBeanBuilder setInjvm(Boolean injvm) {
        return store(ReferenceAttributes.INJVM, injvm);
    }

    public ReferenceBeanBuilder setListener(String listener) {
        return store(ReferenceAttributes.LISTENER, listener);
    }

    public ReferenceBeanBuilder setLazy(Boolean lazy) {
        return store(ReferenceAttributes.LAZY, lazy);
    }

    public ReferenceBeanBuilder setOnconnect(String onconnect) {
        return store(ReferenceAttributes.ONCONNECT, onconnect);
    }

    public ReferenceBeanBuilder setOndisconnect(String ondisconnect) {
        return store(ReferenceAttributes.ONDISCONNECT, ondisconnect);
    }

    public ReferenceBeanBuilder setReconnect(String reconnect) {
        return store(ReferenceAttributes.RECONNECT, reconnect);
    }

    public ReferenceBeanBuilder setSticky(Boolean sticky) {
        return store(ReferenceAttributes.STICKY, sticky);
    }

    public ReferenceBeanBuilder setVersion(String version) {
        return store(ReferenceAttributes.VERSION, version);
    }

    public ReferenceBeanBuilder setGroup(String group) {
        return store(ReferenceAttributes.GROUP, group);
    }

    public ReferenceBeanBuilder setProvidedBy(String providedBy) {
        return store(ReferenceAttributes.PROVIDED_BY, providedBy);
    }

    public ReferenceBeanBuilder setProviderPort(Integer providerPort) {
        return store(ReferenceAttributes.PROVIDER_PORT, providerPort);
    }

    public ReferenceBeanBuilder setStub(String stub) {
        return store(ReferenceAttributes.STUB, stub);
    }

    public ReferenceBeanBuilder setCluster(String cluster) {
        return store(ReferenceAttributes.CLUSTER, cluster);
    }

    public ReferenceBeanBuilder setProxy(String proxy) {
        return store(ReferenceAttributes.PROXY, proxy);
    }

    public ReferenceBeanBuilder setConnections(Integer connections) {
        return store(ReferenceAttributes.CONNECTIONS, connections);
    }

    public ReferenceBeanBuilder setFilter(String filter) {
        return store(ReferenceAttributes.FILTER, filter);
    }

    public ReferenceBeanBuilder setLayer(String layer) {
        return store(ReferenceAttributes.LAYER, layer);
    }

    public ReferenceBeanBuilder setRegistry(String[] registryIds) {
        return store(ReferenceAttributes.REGISTRY, registryIds);
    }

    /** Convenience overload: wraps the single registry into a one-element list. */
    public ReferenceBeanBuilder setRegistry(RegistryConfig registry) {
        return setRegistries(Arrays.asList(registry));
    }

    public ReferenceBeanBuilder setRegistries(List<? extends RegistryConfig> registries) {
        return store(ReferenceAttributes.REGISTRIES, registries);
    }

    public ReferenceBeanBuilder setMethods(List<? extends MethodConfig> methods) {
        return store(ReferenceAttributes.METHODS, methods);
    }

    @Deprecated
    public ReferenceBeanBuilder setMonitor(MonitorConfig monitor) {
        return store(ReferenceAttributes.MONITOR, monitor);
    }

    @Deprecated
    public ReferenceBeanBuilder setMonitor(String monitor) {
        return store(ReferenceAttributes.MONITOR, monitor);
    }

    public ReferenceBeanBuilder setOwner(String owner) {
        return store(ReferenceAttributes.OWNER, owner);
    }

    public ReferenceBeanBuilder setCallbacks(Integer callbacks) {
        return store(ReferenceAttributes.CALLBACKS, callbacks);
    }

    public ReferenceBeanBuilder setScope(String scope) {
        return store(ReferenceAttributes.SCOPE, scope);
    }

    public ReferenceBeanBuilder setTag(String tag) {
        return store(ReferenceAttributes.TAG, tag);
    }

    public ReferenceBeanBuilder setTimeout(Integer timeout) {
        return store(ReferenceAttributes.TIMEOUT, timeout);
    }

    public ReferenceBeanBuilder setRetries(Integer retries) {
        return store(ReferenceAttributes.RETRIES, retries);
    }

    public ReferenceBeanBuilder setLoadBalance(String loadbalance) {
        return store(ReferenceAttributes.LOAD_BALANCE, loadbalance);
    }

    public ReferenceBeanBuilder setAsync(Boolean async) {
        return store(ReferenceAttributes.ASYNC, async);
    }

    public ReferenceBeanBuilder setActives(Integer actives) {
        return store(ReferenceAttributes.ACTIVES, actives);
    }

    public ReferenceBeanBuilder setSent(Boolean sent) {
        return store(ReferenceAttributes.SENT, sent);
    }

    public ReferenceBeanBuilder setMock(String mock) {
        return store(ReferenceAttributes.MOCK, mock);
    }

    public ReferenceBeanBuilder setMerger(String merger) {
        return store(ReferenceAttributes.MERGER, merger);
    }

    public ReferenceBeanBuilder setCache(String cache) {
        return store(ReferenceAttributes.CACHE, cache);
    }

    public ReferenceBeanBuilder setValidation(String validation) {
        return store(ReferenceAttributes.VALIDATION, validation);
    }

    public ReferenceBeanBuilder setParameters(Map<String, String> parameters) {
        return store(ReferenceAttributes.PARAMETERS, parameters);
    }
}
| 8,812 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceBeanManager.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.reference;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.Assert;
import org.apache.dubbo.common.utils.ConcurrentHashMapUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.ReferenceConfig;
import org.apache.dubbo.config.spring.ReferenceBean;
import org.apache.dubbo.config.spring.util.DubboBeanUtils;
import org.apache.dubbo.rpc.model.ModuleModel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_DUBBO_BEAN_INITIALIZER;
public class ReferenceBeanManager implements ApplicationContextAware {
    public static final String BEAN_NAME = "dubboReferenceBeanManager";

    private final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());

    // reference key -> reference bean names; the first name in each list is the
    // canonical bean name, later entries are aliases for the same key.
    private ConcurrentMap<String, List<String>> referenceKeyMap = new ConcurrentHashMap<>();

    // reference alias -> reference bean name
    private ConcurrentMap<String, String> referenceAliasMap = new ConcurrentHashMap<>();

    // reference bean name -> ReferenceBean
    private ConcurrentMap<String, ReferenceBean> referenceBeanMap = new ConcurrentHashMap<>();

    // reference key -> ReferenceConfig instance, shared by beans with identical attributes
    private ConcurrentMap<String, ReferenceConfig> referenceConfigMap = new ConcurrentHashMap<>();

    private ApplicationContext applicationContext;

    // Flips to true in prepareReferenceBeans(); references added afterwards are
    // initialized immediately instead of being deferred to Dubbo startup.
    private volatile boolean initialized = false;

    private ModuleModel moduleModel;

    /**
     * Registers a ReferenceBean under its id and maps its generated reference key.
     * Re-registering the same instance is a no-op; registering a different
     * instance under an already-used id fails. If the manager has already been
     * initialized, the bean is initialized immediately.
     *
     * @param referenceBean the reference bean to register; must have a non-empty id
     * @throws IllegalStateException if a different ReferenceBean already uses the same id
     * @throws Exception if reference key generation or eager initialization fails
     */
    public void addReference(ReferenceBean referenceBean) throws Exception {
        String referenceBeanName = referenceBean.getId();
        Assert.notEmptyString(referenceBeanName, "The id of ReferenceBean cannot be empty");
        if (!initialized) {
            // TODO add issue url to describe early initialization
            logger.warn(
                    CONFIG_DUBBO_BEAN_INITIALIZER,
                    "",
                    "",
                    "Early initialize reference bean before DubboConfigBeanInitializer,"
                            + " the BeanPostProcessor has not been loaded at this time, which may cause abnormalities in some components (such as seata): "
                            + referenceBeanName
                            + " = " + ReferenceBeanSupport.generateReferenceKey(referenceBean, applicationContext));
        }
        // Reuse a previously registered key for this bean name if one exists.
        String referenceKey = getReferenceKeyByBeanName(referenceBeanName);
        if (StringUtils.isEmpty(referenceKey)) {
            referenceKey = ReferenceBeanSupport.generateReferenceKey(referenceBean, applicationContext);
        }
        ReferenceBean oldReferenceBean = referenceBeanMap.get(referenceBeanName);
        if (oldReferenceBean != null) {
            if (referenceBean != oldReferenceBean) {
                String oldReferenceKey =
                        ReferenceBeanSupport.generateReferenceKey(oldReferenceBean, applicationContext);
                throw new IllegalStateException("Found duplicated ReferenceBean with id: " + referenceBeanName
                        + ", old: " + oldReferenceKey + ", new: " + referenceKey);
            }
            return;
        }
        referenceBeanMap.put(referenceBeanName, referenceBean);
        // save cache, map reference key to referenceBeanName
        this.registerReferenceKeyAndBeanName(referenceKey, referenceBeanName);

        // if add reference after prepareReferenceBeans(), should init it immediately.
        if (initialized) {
            initReferenceBean(referenceBean);
        }
    }

    /**
     * Looks up the reference key a bean name was registered under, or null if
     * the name has not been registered yet. Linear scan over all keys.
     */
    private String getReferenceKeyByBeanName(String referenceBeanName) {
        Set<Map.Entry<String, List<String>>> entries = referenceKeyMap.entrySet();
        for (Map.Entry<String, List<String>> entry : entries) {
            if (entry.getValue().contains(referenceBeanName)) {
                return entry.getKey();
            }
        }
        return null;
    }

    /**
     * Maps a reference key to a bean name (or alias). The first name registered
     * for a key becomes the canonical name that all later aliases resolve to.
     * NOTE(review): the contains/add sequence on the shared list is not atomic;
     * concurrent registration of the same key could race — confirm all callers
     * run on the Spring startup thread.
     */
    public void registerReferenceKeyAndBeanName(String referenceKey, String referenceBeanNameOrAlias) {
        List<String> list =
                ConcurrentHashMapUtils.computeIfAbsent(referenceKeyMap, referenceKey, (key) -> new ArrayList<>());
        if (!list.contains(referenceBeanNameOrAlias)) {
            list.add(referenceBeanNameOrAlias);
            // register bean name as alias
            referenceAliasMap.put(referenceBeanNameOrAlias, list.get(0));
        }
    }

    /**
     * Returns the ReferenceBean registered under the given name or alias, or
     * null if none exists.
     */
    public ReferenceBean getById(String referenceBeanNameOrAlias) {
        String referenceBeanName = transformName(referenceBeanNameOrAlias);
        return referenceBeanMap.get(referenceBeanName);
    }

    // convert reference name/alias to referenceBeanName
    private String transformName(String referenceBeanNameOrAlias) {
        return referenceAliasMap.getOrDefault(referenceBeanNameOrAlias, referenceBeanNameOrAlias);
    }

    /**
     * Returns an unmodifiable view of the bean names registered for the given
     * reference key (empty if the key is unknown).
     */
    public List<String> getBeanNamesByKey(String key) {
        // Collections.emptyList() avoids the raw-typed EMPTY_LIST constant.
        return Collections.unmodifiableList(referenceKeyMap.getOrDefault(key, Collections.emptyList()));
    }

    /** Returns a snapshot of all registered reference beans. */
    public Collection<ReferenceBean> getReferences() {
        return new HashSet<>(referenceBeanMap.values());
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
        moduleModel = DubboBeanUtils.getModuleModel(applicationContext);
    }

    /**
     * Initialize all reference beans, call at Dubbo starting
     *
     * @throws Exception
     */
    public void prepareReferenceBeans() throws Exception {
        initialized = true;
        for (ReferenceBean referenceBean : getReferences()) {
            initReferenceBean(referenceBean);
        }
    }

    /**
     * NOTE: This method should only call after all dubbo config beans and all property resolvers is loaded.
     * Creates (or reuses) the ReferenceConfig for the bean's reference key and
     * associates it with the bean. Idempotent per bean.
     *
     * @param referenceBean
     * @throws Exception
     */
    public synchronized void initReferenceBean(ReferenceBean referenceBean) throws Exception {
        if (referenceBean.getReferenceConfig() != null) {
            return;
        }

        // TODO check same unique service name but difference reference key (means difference attributes).

        // reference key
        String referenceKey = getReferenceKeyByBeanName(referenceBean.getId());
        if (StringUtils.isEmpty(referenceKey)) {
            referenceKey = ReferenceBeanSupport.generateReferenceKey(referenceBean, applicationContext);
        }

        ReferenceConfig referenceConfig = referenceConfigMap.get(referenceKey);
        if (referenceConfig == null) {
            // create real ReferenceConfig
            Map<String, Object> referenceAttributes = ReferenceBeanSupport.getReferenceAttributes(referenceBean);
            referenceConfig = ReferenceCreator.create(referenceAttributes, applicationContext)
                    .defaultInterfaceClass(referenceBean.getObjectType())
                    .build();

            // set id if it is not a generated name
            if (referenceBean.getId() != null && !referenceBean.getId().contains("#")) {
                referenceConfig.setId(referenceBean.getId());
            }

            // cache referenceConfig
            referenceConfigMap.put(referenceKey, referenceConfig);

            // register ReferenceConfig
            moduleModel.getConfigManager().addReference(referenceConfig);
            moduleModel.getDeployer().setPending();
        }

        // associate referenceConfig to referenceBean
        referenceBean.setKeyAndReferenceConfig(referenceKey, referenceConfig);
    }
}
| 8,813 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceCreator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.reference;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.config.AbstractConfig;
import org.apache.dubbo.config.ArgumentConfig;
import org.apache.dubbo.config.ConsumerConfig;
import org.apache.dubbo.config.MethodConfig;
import org.apache.dubbo.config.ModuleConfig;
import org.apache.dubbo.config.MonitorConfig;
import org.apache.dubbo.config.ReferenceConfig;
import org.apache.dubbo.config.annotation.Argument;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.dubbo.config.annotation.Method;
import org.apache.dubbo.config.spring.beans.factory.annotation.AnnotationPropertyValuesAdapter;
import org.apache.dubbo.config.spring.util.AnnotationUtils;
import org.apache.dubbo.config.spring.util.DubboAnnotationUtils;
import org.apache.dubbo.config.spring.util.DubboBeanUtils;
import org.apache.dubbo.config.spring.util.ObjectUtils;
import org.apache.dubbo.rpc.model.ModuleModel;
import java.util.Map;
import org.springframework.beans.propertyeditors.StringTrimmerEditor;
import org.springframework.context.ApplicationContext;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import org.springframework.validation.DataBinder;
/**
* {@link ReferenceConfig} Creator for @{@link DubboReference}
*
* @since 3.0
*/
public class ReferenceCreator {
    // Ignore those fields: they are resolved specially (config lookups by id)
    // instead of being bound directly by the generic DataBinder below.
    static final String[] IGNORE_FIELD_NAMES =
            ObjectUtils.of("application", "module", "consumer", "monitor", "registry", "interfaceClass");

    // @Method callback attribute names; "beanName.methodName" values are split
    // into a target bean plus a method name in createMethodConfig().
    private static final String ONRETURN = "onreturn";
    private static final String ONTHROW = "onthrow";
    private static final String ONINVOKE = "oninvoke";
    private static final String ISRETURN = "isReturn";
    // Suffix appended to a callback key to hold the parsed method name.
    private static final String METHOD = "Method";

    protected final Logger logger = LoggerFactory.getLogger(getClass());

    // Raw annotation/builder attributes to be bound onto the ReferenceConfig.
    protected final Map<String, Object> attributes;

    protected final ApplicationContext applicationContext;

    protected final ClassLoader classLoader;

    // Fallback interface class used when the attributes do not name one.
    protected Class<?> defaultInterfaceClass;

    private final ModuleModel moduleModel;

    private ReferenceCreator(Map<String, Object> attributes, ApplicationContext applicationContext) {
        Assert.notNull(attributes, "The Annotation attributes must not be null!");
        Assert.notNull(applicationContext, "The ApplicationContext must not be null!");
        this.attributes = attributes;
        this.applicationContext = applicationContext;
        // Prefer the context's class loader; fall back to the thread's TCCL.
        this.classLoader = applicationContext.getClassLoader() != null
                ? applicationContext.getClassLoader()
                : Thread.currentThread().getContextClassLoader();
        moduleModel = DubboBeanUtils.getModuleModel(applicationContext);
        Assert.notNull(moduleModel, "ModuleModel not found in Spring ApplicationContext");
    }

    /**
     * Builds a fully configured {@link ReferenceConfig} from the attributes
     * supplied at construction time.
     *
     * @return the populated ReferenceConfig
     * @throws Exception if binding or config resolution fails
     */
    public final ReferenceConfig build() throws Exception {
        ReferenceConfig configBean = new ReferenceConfig();
        configureBean(configBean);
        if (logger.isInfoEnabled()) {
            logger.info("The configBean[type:" + configBean.getClass().getSimpleName() + "<"
                    + defaultInterfaceClass.getTypeName() + ">" + "] has been built.");
        }
        return configBean;
    }

    // Binds plain attributes first, then resolves the referenced sub-configs.
    protected void configureBean(ReferenceConfig referenceConfig) throws Exception {
        populateBean(referenceConfig);
        configureMonitorConfig(referenceConfig);
        configureModuleConfig(referenceConfig);
        configureConsumerConfig(referenceConfig);
    }

    // Resolves the 'monitor' attribute (a config id) into a MonitorConfig.
    private void configureMonitorConfig(ReferenceConfig configBean) {
        String monitorConfigId = AnnotationUtils.getAttribute(attributes, "monitor");
        if (StringUtils.hasText(monitorConfigId)) {
            MonitorConfig monitorConfig = getConfig(monitorConfigId, MonitorConfig.class);
            configBean.setMonitor(monitorConfig);
        }
    }

    // Resolves the 'module' attribute (a config id) into a ModuleConfig.
    private void configureModuleConfig(ReferenceConfig configBean) {
        String moduleConfigId = AnnotationUtils.getAttribute(attributes, "module");
        if (StringUtils.hasText(moduleConfigId)) {
            ModuleConfig moduleConfig = getConfig(moduleConfigId, ModuleConfig.class);
            configBean.setModule(moduleConfig);
        }
    }

    // The 'consumer' attribute may be either a config id (String) or an
    // already-built ConsumerConfig instance.
    private void configureConsumerConfig(ReferenceConfig<?> referenceBean) {
        ConsumerConfig consumerConfig = null;
        Object consumer = AnnotationUtils.getAttribute(attributes, "consumer");
        if (consumer != null) {
            if (consumer instanceof String) {
                consumerConfig = getConfig((String) consumer, ConsumerConfig.class);
            } else if (consumer instanceof ConsumerConfig) {
                consumerConfig = (ConsumerConfig) consumer;
            } else {
                throw new IllegalArgumentException("Unexpected 'consumer' attribute value: " + consumer);
            }
            referenceBean.setConsumer(consumerConfig);
        }
    }

    /**
     * Looks up a config by id or name, first in Dubbo's ModuleConfigManager,
     * then in the Spring ApplicationContext.
     *
     * @throws IllegalArgumentException if no config is found in either place
     */
    private <T extends AbstractConfig> T getConfig(String configIdOrName, Class<T> configType) {
        // 1. find in ModuleConfigManager
        T config = moduleModel
                .getConfigManager()
                .getConfig(configType, configIdOrName)
                .orElse(null);
        if (config == null) {
            // 2. find in Spring ApplicationContext
            if (applicationContext.containsBean(configIdOrName)) {
                config = applicationContext.getBean(configIdOrName, configType);
            }
        }
        if (config == null) {
            throw new IllegalArgumentException(configType.getSimpleName() + " not found: " + configIdOrName);
        }
        return config;
    }

    /**
     * Binds the attribute map onto the ReferenceConfig via Spring's DataBinder,
     * registering converters for the nested @Method/@Argument structures.
     */
    protected void populateBean(ReferenceConfig referenceConfig) {
        Assert.notNull(defaultInterfaceClass, "The default interface class cannot be empty!");
        // convert attributes, e.g. interface, registry
        ReferenceBeanSupport.convertReferenceProps(attributes, defaultInterfaceClass);

        DataBinder dataBinder = new DataBinder(referenceConfig);
        // Register CustomEditors for special fields
        dataBinder.registerCustomEditor(String.class, "filter", new StringTrimmerEditor(true));
        dataBinder.registerCustomEditor(String.class, "listener", new StringTrimmerEditor(true));

        DefaultConversionService conversionService = new DefaultConversionService();
        // convert String[] to Map (such as @Method.parameters())
        conversionService.addConverter(String[].class, Map.class, DubboAnnotationUtils::convertParameters);
        // convert Map to MethodConfig
        conversionService.addConverter(
                Map.class, MethodConfig.class, source -> createMethodConfig(source, conversionService));
        // convert @Method to MethodConfig
        conversionService.addConverter(Method.class, MethodConfig.class, source -> {
            Map<String, Object> methodAttributes = AnnotationUtils.getAnnotationAttributes(source, true);
            return createMethodConfig(methodAttributes, conversionService);
        });
        // convert Map to ArgumentConfig
        conversionService.addConverter(Map.class, ArgumentConfig.class, source -> {
            ArgumentConfig argumentConfig = new ArgumentConfig();
            DataBinder argDataBinder = new DataBinder(argumentConfig);
            argDataBinder.setConversionService(conversionService);
            argDataBinder.bind(new AnnotationPropertyValuesAdapter(source, applicationContext.getEnvironment()));
            return argumentConfig;
        });
        // convert @Argument to ArgumentConfig
        conversionService.addConverter(Argument.class, ArgumentConfig.class, source -> {
            ArgumentConfig argumentConfig = new ArgumentConfig();
            DataBinder argDataBinder = new DataBinder(argumentConfig);
            argDataBinder.setConversionService(conversionService);
            argDataBinder.bind(new AnnotationPropertyValuesAdapter(source, applicationContext.getEnvironment()));
            return argumentConfig;
        });

        // Bind annotation attributes
        dataBinder.setConversionService(conversionService);
        dataBinder.bind(new AnnotationPropertyValuesAdapter(
                attributes, applicationContext.getEnvironment(), IGNORE_FIELD_NAMES));
    }

    /**
     * Converts a map of @Method attributes into a MethodConfig; callback values
     * of the form "beanName.methodName" are resolved to a bean instance plus a
     * method-name entry before binding.
     */
    private MethodConfig createMethodConfig(
            Map<String, Object> methodAttributes, DefaultConversionService conversionService) {
        String[] callbacks = new String[] {ONINVOKE, ONRETURN, ONTHROW};
        for (String callbackName : callbacks) {
            Object value = methodAttributes.get(callbackName);
            if (value instanceof String) {
                // parse callback: beanName.methodName
                String strValue = (String) value;
                int index = strValue.lastIndexOf(".");
                if (index != -1) {
                    String beanName = strValue.substring(0, index);
                    String methodName = strValue.substring(index + 1);
                    methodAttributes.put(callbackName, applicationContext.getBean(beanName));
                    methodAttributes.put(callbackName + METHOD, methodName);
                } else {
                    // No dot: the whole value is a bean name.
                    methodAttributes.put(callbackName, applicationContext.getBean(strValue));
                }
            }
        }
        MethodConfig methodConfig = new MethodConfig();
        DataBinder mcDataBinder = new DataBinder(methodConfig);
        // "isReturn" is set explicitly because the binder keys differ from the field name.
        methodConfig.setReturn((Boolean) methodAttributes.get(ISRETURN));
        mcDataBinder.setConversionService(conversionService);
        AnnotationPropertyValuesAdapter propertyValues =
                new AnnotationPropertyValuesAdapter(methodAttributes, applicationContext.getEnvironment());
        mcDataBinder.bind(propertyValues);
        return methodConfig;
    }

    /** Factory method: creates a creator for the given attributes and context. */
    public static ReferenceCreator create(Map<String, Object> attributes, ApplicationContext applicationContext) {
        return new ReferenceCreator(attributes, applicationContext);
    }

    /** Sets the interface class used when the attributes do not declare one. */
    public ReferenceCreator defaultInterfaceClass(Class<?> interfaceClass) {
        this.defaultInterfaceClass = interfaceClass;
        return this;
    }
}
| 8,814 |
0 |
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring
|
Create_ds/dubbo/dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceAttributes.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.reference;
/**
* Attribute names of {@link org.apache.dubbo.config.annotation.DubboReference}
* and {@link org.apache.dubbo.config.ReferenceConfig}
*/
public interface ReferenceAttributes {

    // --- identity and interface resolution ---
    String ID = "id";
    String INTERFACE = "interface";
    String INTERFACE_NAME = "interfaceName";
    String INTERFACE_CLASS = "interfaceClass";
    String ACTUAL_INTERFACE = "actualInterface";
    String GENERIC = "generic";

    // --- registry / service identity ---
    String REGISTRY = "registry";
    String REGISTRIES = "registries";
    String REGISTRY_IDS = "registryIds";
    String GROUP = "group";
    String VERSION = "version";

    // --- method and parameter configuration ---
    String ARGUMENTS = "arguments";
    String METHODS = "methods";
    String PARAMETERS = "parameters";

    // --- addressing ---
    String PROVIDED_BY = "providedBy";
    String PROVIDER_PORT = "providerPort";
    String URL = "url";
    String CLIENT = "client";
    // /**
    //  * When enable, prefer to call local service in the same JVM if it's present, default value is true
    //  * @deprecated using scope="local" or scope="remote" instead
    //  */
    // @Deprecated
    String INJVM = "injvm";

    // --- lifecycle and connection behavior ---
    String CHECK = "check";
    String INIT = "init";
    String LAZY = "lazy";
    String STUBEVENT = "stubevent";
    String RECONNECT = "reconnect";
    String STICKY = "sticky";

    // --- invocation plumbing ---
    String PROXY = "proxy";
    String STUB = "stub";
    String CLUSTER = "cluster";
    String CONNECTIONS = "connections";
    String CALLBACKS = "callbacks";
    String ONCONNECT = "onconnect";
    String ONDISCONNECT = "ondisconnect";
    String OWNER = "owner";
    String LAYER = "layer";

    // --- invocation policy ---
    String RETRIES = "retries";
    String LOAD_BALANCE = "loadbalance";
    String ASYNC = "async";
    String ACTIVES = "actives";
    String SENT = "sent";
    String MOCK = "mock";
    String VALIDATION = "validation";
    String TIMEOUT = "timeout";
    String CACHE = "cache";
    String FILTER = "filter";
    String LISTENER = "listener";

    // --- associated config references ---
    String APPLICATION = "application";
    String MODULE = "module";
    String CONSUMER = "consumer";
    String MONITOR = "monitor";
    String PROTOCOL = "protocol";

    // --- miscellaneous ---
    String TAG = "tag";
    String MERGER = "merger";
    String SERVICES = "services";
    String SCOPE = "scope";
}
| 8,815 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize/hessian2/TrustedPojo.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import java.io.Serializable;
import java.util.Objects;
/**
 * Minimal serializable value object used as a "trusted" type in Hessian2
 * serialization tests.
 */
public class TrustedPojo implements Serializable {

    // Explicit UID keeps the serialized form stable across recompiles.
    private static final long serialVersionUID = 1L;

    /** Immutable payload carried by this POJO. */
    private final double data;

    public TrustedPojo(double data) {
        this.data = data;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TrustedPojo that = (TrustedPojo) o;
        // Compare the primitives directly instead of autoboxing via
        // Objects.equals; Double.compare matches Double.equals semantics
        // (NaN equals NaN, +0.0 differs from -0.0), keeping equals/hashCode
        // consistent.
        return Double.compare(data, that.data) == 0;
    }

    @Override
    public int hashCode() {
        return Objects.hash(data);
    }
}
| 8,816 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize/hessian2/TypeMatchTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.DataInput;
import org.apache.dubbo.common.serialize.DataOutput;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
/**
 * Verifies type matching between hessian2 typed read and write methods: a
 * write{Type} paired with the read{Type} of the same type must round-trip the
 * value, while a mismatched pair must either throw or yield a different value.
 */
class TypeMatchTest {

    /**
     * Supplies (data, readMethod, writeMethod) triples covering every typed
     * accessor discovered reflectively on ObjectInput/ObjectOutput and
     * DataInput/DataOutput (the generic Object variants are excluded).
     */
    static class DataProvider implements ArgumentsProvider {
        @Override
        public Stream<? extends Arguments> provideArguments(ExtensionContext extensionContext) throws Exception {
            List<Object> datas = new LinkedList<>();
            List<Method> readMethods = new LinkedList<>();
            List<Method> writeMethods = new LinkedList<>();
            // Two sample values for each supported payload type
            // (boolean, byte, short, int, long, float, double, String, byte[]).
            datas.add(true);
            datas.add(false);
            datas.add((byte) 123);
            datas.add((byte) 234);
            datas.add((short) 12345);
            datas.add((short) 23456);
            datas.add(123456);
            datas.add(234567);
            datas.add(1234567L);
            datas.add(2345678L);
            datas.add(0.123F);
            datas.add(1.234F);
            datas.add(0.1234D);
            datas.add(1.2345D);
            datas.add("hello");
            datas.add("world");
            datas.add("hello".getBytes());
            datas.add("world".getBytes());
            // Collect all no-arg typed read methods, skipping readObject().
            for (Method method : ObjectInput.class.getMethods()) {
                if (method.getName().startsWith("read")
                        && method.getParameterTypes().length == 0
                        && !method.getReturnType().equals(Object.class)) {
                    readMethods.add(method);
                }
            }
            for (Method method : DataInput.class.getMethods()) {
                if (method.getName().startsWith("read")
                        && method.getParameterTypes().length == 0
                        && !method.getReturnType().equals(Object.class)) {
                    readMethods.add(method);
                }
            }
            // Collect all single-arg typed write methods, skipping writeObject(Object).
            for (Method method : ObjectOutput.class.getMethods()) {
                if (method.getName().startsWith("write")
                        && method.getParameterTypes().length == 1
                        && !method.getParameterTypes()[0].equals(Object.class)) {
                    writeMethods.add(method);
                }
            }
            for (Method method : DataOutput.class.getMethods()) {
                if (method.getName().startsWith("write")
                        && method.getParameterTypes().length == 1
                        && !method.getParameterTypes()[0].equals(Object.class)) {
                    writeMethods.add(method);
                }
            }
            // Wrapper -> primitive mapping so a boxed sample (e.g. Integer)
            // also matches write methods declared with primitive parameters.
            Map<Class<?>, Class<?>> primitiveWrapperTypeMap = new HashMap<>(16);
            primitiveWrapperTypeMap.put(Boolean.class, boolean.class);
            primitiveWrapperTypeMap.put(Byte.class, byte.class);
            primitiveWrapperTypeMap.put(Character.class, char.class);
            primitiveWrapperTypeMap.put(Double.class, double.class);
            primitiveWrapperTypeMap.put(Float.class, float.class);
            primitiveWrapperTypeMap.put(Integer.class, int.class);
            primitiveWrapperTypeMap.put(Long.class, long.class);
            primitiveWrapperTypeMap.put(Short.class, short.class);
            primitiveWrapperTypeMap.put(Void.class, void.class);
            // Pair every datum with each read method and every write method
            // whose parameter can accept it (boxed or primitive form).
            List<Arguments> argumentsList = new LinkedList<>();
            for (Object data : datas) {
                for (Method input : readMethods) {
                    for (Method output : writeMethods) {
                        if (output.getParameterTypes()[0].isAssignableFrom(data.getClass())) {
                            argumentsList.add(Arguments.arguments(data, input, output));
                        }
                        if (primitiveWrapperTypeMap.containsKey(data.getClass())
                                && output.getParameterTypes()[0].isAssignableFrom(
                                        primitiveWrapperTypeMap.get(data.getClass()))) {
                            argumentsList.add(Arguments.arguments(data, input, output));
                        }
                    }
                }
            }
            return argumentsList.stream();
        }
    }

    /**
     * Serializes {@code data} with {@code output}, then deserializes with
     * {@code input}. Matching types must round-trip the exact value; for
     * mismatched types the read must throw or return something different.
     */
    @ParameterizedTest
    @ArgumentsSource(DataProvider.class)
    void test(Object data, Method input, Method output) throws Exception {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        ObjectOutput objectOutput = serialization.serialize(url, outputStream);
        output.invoke(objectOutput, data);
        objectOutput.flushBuffer();
        byte[] bytes = outputStream.toByteArray();
        ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
        ObjectInput objectInput = serialization.deserialize(url, inputStream);
        if (output.getParameterTypes()[0].equals(input.getReturnType())) {
            // Same declared type on both sides: the value must survive intact.
            Object result = input.invoke(objectInput);
            if (data.getClass().isArray()) {
                Assertions.assertArrayEquals((byte[]) data, (byte[]) result);
            } else {
                Assertions.assertEquals(data, result);
            }
        } else {
            // Mismatched types: a thrown exception is an acceptable outcome;
            // otherwise the decoded value must not equal the original.
            try {
                Object result = input.invoke(objectInput);
                if (data.getClass().isArray()) {
                    Assertions.assertNotEquals(data.getClass(), result.getClass());
                } else {
                    Assertions.assertNotEquals(data, result);
                }
            } catch (Exception e) {
                // ignore — a type-mismatch failure is expected behavior here
            }
        }
        frameworkModel.destroy();
    }
}
| 8,817 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize/hessian2/TrustedNotSerializable.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import java.util.Objects;
/**
 * Test fixture: a trusted type that deliberately does NOT implement
 * {@link java.io.Serializable}, used to verify that hessian2 rejects
 * non-serializable payloads. Equality is value-based on the payload.
 */
public class TrustedNotSerializable {

    /** Payload value driving equals()/hashCode(). */
    private final double data;

    public TrustedNotSerializable(double data) {
        this.data = data;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null) {
            return false;
        }
        if (getClass() != other.getClass()) {
            return false;
        }
        TrustedNotSerializable typed = (TrustedNotSerializable) other;
        return Objects.equals(data, typed.data);
    }

    @Override
    public int hashCode() {
        return Objects.hash(data);
    }
}
| 8,818 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize/hessian2/TrustedPojo2.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import java.io.Serializable;
import java.util.Objects;
/**
 * Test fixture with a field layout identical to {@code TrustedPojo}, used to
 * verify cross-class deserialization behavior. Equality is value-based.
 */
public class TrustedPojo2 implements Serializable {

    /** Payload value driving equals()/hashCode(). */
    private final double data;

    public TrustedPojo2(double data) {
        this.data = data;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null) {
            return false;
        }
        if (getClass() != other.getClass()) {
            return false;
        }
        TrustedPojo2 typed = (TrustedPojo2) other;
        return Objects.equals(data, typed.data);
    }

    @Override
    public int hashCode() {
        return Objects.hash(data);
    }
}
| 8,819 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/org/apache/dubbo/common/serialize/hessian2/Hessian2SerializationTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.common.utils.SerializeCheckStatus;
import org.apache.dubbo.common.utils.SerializeSecurityManager;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import com.example.test.TestPojo;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
class Hessian2SerializationTest {
@Test
void testReadString() throws IOException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write string, read string
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject("hello");
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals("hello", objectInput.readUTF());
}
// write string, read string
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(null);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertNull(objectInput.readUTF());
}
// write date, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new Date());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readUTF);
}
// write pojo, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readUTF);
}
// write map, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new HashMap<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readUTF);
}
// write list, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new LinkedList<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readUTF);
}
frameworkModel.destroy();
}
@Test
void testReadEvent() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write string, read event
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject("hello");
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals("hello", objectInput.readEvent());
}
// write date, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new Date());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readEvent);
}
// write pojo, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readEvent);
}
// write map, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new HashMap<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readEvent);
}
// write list, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new LinkedList<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readEvent);
}
frameworkModel.destroy();
}
@Test
void testReadByte() throws IOException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write byte, read byte
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject((byte) 11);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals((byte) 11, objectInput.readByte());
}
// write date, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new Date());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readByte);
}
// write pojo, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readByte);
}
// write map, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new HashMap<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readByte);
}
// write list, read failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(new LinkedList<>());
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, objectInput::readByte);
}
frameworkModel.destroy();
}
@Test
void testReadObject() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write pojo, read pojo
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(trustedPojo, objectInput.readObject());
}
// write list, read list
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
LinkedList<TrustedPojo> pojos = new LinkedList<>();
pojos.add(trustedPojo);
objectOutput.writeObject(pojos);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(pojos, objectInput.readObject());
}
// write pojo, read pojo
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(trustedPojo, objectInput.readObject(TrustedPojo.class));
}
// write list, read list
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
LinkedList<TrustedPojo> pojos = new LinkedList<>();
pojos.add(trustedPojo);
objectOutput.writeObject(pojos);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(pojos, objectInput.readObject(List.class));
}
// write list, read list
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
LinkedList<TrustedPojo> pojos = new LinkedList<>();
pojos.add(trustedPojo);
objectOutput.writeObject(pojos);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(pojos, objectInput.readObject(LinkedList.class));
}
frameworkModel.destroy();
}
@Test
void testReadObjectNotMatched() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write pojo, read list failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(List.class));
}
// write pojo, read list failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(LinkedList.class));
}
// write pojo, read string failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(String.class));
}
// write pojo, read other failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(TrustedNotSerializable.class));
}
// write pojo, read same field failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertInstanceOf(TrustedPojo2.class, objectInput.readObject(TrustedPojo2.class));
}
// write pojo, read map failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertInstanceOf(Map.class, objectInput.readObject(Map.class));
}
// write list, read pojo failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
LinkedList<TrustedPojo> pojos = new LinkedList<>();
pojos.add(trustedPojo);
objectOutput.writeObject(pojos);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(TrustedPojo.class));
}
// write list, read map failed
{
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
TrustedPojo trustedPojo =
new TrustedPojo(ThreadLocalRandom.current().nextDouble());
LinkedList<TrustedPojo> pojos = new LinkedList<>();
pojos.add(trustedPojo);
objectOutput.writeObject(pojos);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertThrows(IOException.class, () -> objectInput.readObject(Map.class));
}
frameworkModel.destroy();
}
@Test
void testLimit1() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write trusted, read trusted
TrustedPojo trustedPojo = new TrustedPojo(ThreadLocalRandom.current().nextDouble());
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
byte[] bytes = outputStream.toByteArray();
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertEquals(trustedPojo, objectInput.readObject());
frameworkModel.destroy();
}
@Test
void testLimit2() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
frameworkModel
.getBeanFactory()
.getBean(SerializeSecurityManager.class)
.setCheckStatus(SerializeCheckStatus.STRICT);
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write untrusted failed
TestPojo trustedPojo = new TestPojo("12345");
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
Assertions.assertThrows(IOException.class, () -> objectOutput.writeObject(trustedPojo));
frameworkModel.destroy();
}
@Test
void testLimit3() throws IOException, ClassNotFoundException {
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
// write un-serializable failed
TrustedNotSerializable trustedPojo =
new TrustedNotSerializable(ThreadLocalRandom.current().nextDouble());
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
Assertions.assertThrows(IOException.class, () -> objectOutput.writeObject(trustedPojo));
frameworkModel.destroy();
}
@Test
void testLimit4() throws IOException, ClassNotFoundException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
// write force untrusted, read failed
{
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
TestPojo trustedPojo = new TestPojo("12345");
frameworkModel
.getBeanFactory()
.getBean(SerializeSecurityManager.class)
.addToAllowed(trustedPojo.getClass().getName());
ObjectOutput objectOutput = serialization.serialize(url, outputStream);
objectOutput.writeObject(trustedPojo);
objectOutput.flushBuffer();
frameworkModel.destroy();
}
{
FrameworkModel frameworkModel = new FrameworkModel();
Serialization serialization =
frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
URL url = URL.valueOf("").setScopeModel(frameworkModel);
byte[] bytes = outputStream.toByteArray();
frameworkModel
.getBeanFactory()
.getBean(SerializeSecurityManager.class)
.setCheckStatus(SerializeCheckStatus.STRICT);
ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
ObjectInput objectInput = serialization.deserialize(url, inputStream);
Assertions.assertInstanceOf(Map.class, objectInput.readObject());
frameworkModel.destroy();
}
}
// Even if a non-Serializable object was force-written (all serializable checks
// disabled on the writer), a default reader still refuses to restore the original
// type and yields a generic Map instead.
@Test
void testLimit5() throws IOException, ClassNotFoundException {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    // write force un-serializable, read failed
    {
        // Writer: disable both hessian's serializable guard (system property, read when
        // the serializer factory is created) and dubbo's own serializable check.
        System.setProperty("dubbo.hessian.allowNonSerializable", "true");
        try {
            FrameworkModel frameworkModel = new FrameworkModel();
            Serialization serialization =
                    frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
            URL url = URL.valueOf("").setScopeModel(frameworkModel);
            TrustedNotSerializable trustedPojo =
                    new TrustedNotSerializable(ThreadLocalRandom.current().nextDouble());
            frameworkModel
                    .getBeanFactory()
                    .getBean(SerializeSecurityManager.class)
                    .setCheckSerializable(false);
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            frameworkModel.destroy();
        } finally {
            // Always restore the global property so a failure above cannot leak state into
            // other tests (previously it was cleared only on the success path).
            System.clearProperty("dubbo.hessian.allowNonSerializable");
        }
    }
    {
        // Reader: default settings; the forced payload is materialized as a Map.
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("hessian2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        byte[] bytes = outputStream.toByteArray();
        ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
        ObjectInput objectInput = serialization.deserialize(url, inputStream);
        Assertions.assertInstanceOf(Map.class, objectInput.readObject());
        frameworkModel.destroy();
    }
}
}
| 8,820 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/com/example
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/test/java/com/example/test/TestPojo.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.test;
import java.io.Serializable;
import java.util.Objects;
/**
 * Simple serializable value holder used by the hessian2 serialization tests.
 * {@link #toString()} deliberately throws so any accidental invocation of it
 * during (de)serialization is surfaced as a test failure.
 */
public class TestPojo implements Serializable {
    private final String data;

    public TestPojo(String value) {
        this.data = value;
    }

    @Override
    public String toString() {
        // Intentional: serialization machinery must never call toString().
        throw new IllegalAccessError();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        TestPojo that = (TestPojo) other;
        return Objects.equals(data, that.data);
    }

    @Override
    public int hashCode() {
        return Objects.hash(data);
    }
}
| 8,821 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2ClassLoaderListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.rpc.model.FrameworkModel;
import org.apache.dubbo.rpc.model.ScopeClassLoaderListener;
/**
 * Evicts the cached hessian2 serializer factory when a class loader is detached
 * from a {@link FrameworkModel}, preventing class loader leaks.
 */
public class Hessian2ClassLoaderListener implements ScopeClassLoaderListener<FrameworkModel> {

    @Override
    public void onAddClassLoader(FrameworkModel scopeModel, ClassLoader classLoader) {
        // Nothing to prepare eagerly; factories are created lazily on first use.
    }

    @Override
    public void onRemoveClassLoader(FrameworkModel scopeModel, ClassLoader classLoader) {
        scopeModel
                .getBeanFactory()
                .getBean(Hessian2FactoryManager.class)
                .onRemoveClassLoader(classLoader);
    }
}
| 8,822 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2ScopeModelInitializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.beans.factory.ScopeBeanFactory;
import org.apache.dubbo.rpc.model.ApplicationModel;
import org.apache.dubbo.rpc.model.FrameworkModel;
import org.apache.dubbo.rpc.model.ModuleModel;
import org.apache.dubbo.rpc.model.ScopeModelInitializer;
/**
 * Wires hessian2 serialization support into a framework scope: registers the
 * factory manager bean and a listener that cleans up per-classloader caches.
 */
public class Hessian2ScopeModelInitializer implements ScopeModelInitializer {

    @Override
    public void initializeFrameworkModel(FrameworkModel frameworkModel) {
        frameworkModel.getBeanFactory().registerBean(Hessian2FactoryManager.class);
        frameworkModel.addClassLoaderListener(new Hessian2ClassLoaderListener());
    }

    @Override
    public void initializeApplicationModel(ApplicationModel applicationModel) {
        // No application-scoped state required.
    }

    @Override
    public void initializeModuleModel(ModuleModel moduleModel) {
        // No module-scoped state required.
    }
}
| 8,823 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2Serialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Optional;
import static org.apache.dubbo.common.serialize.Constants.HESSIAN2_SERIALIZATION_ID;
/**
 * Hessian2 serialization implementation, hessian2 is the default serialization protocol for dubbo
 *
 * <pre>
 * e.g. &lt;dubbo:protocol serialization="hessian2" /&gt;
 * </pre>
 */
public class Hessian2Serialization implements Serialization {

    @Override
    public byte getContentTypeId() {
        return HESSIAN2_SERIALIZATION_ID;
    }

    @Override
    public String getContentType() {
        return "x-application/hessian2";
    }

    @Override
    public ObjectOutput serialize(URL url, OutputStream out) throws IOException {
        return new Hessian2ObjectOutput(out, factoryManagerFor(url));
    }

    @Override
    public ObjectInput deserialize(URL url, InputStream is) throws IOException {
        return new Hessian2ObjectInput(is, factoryManagerFor(url));
    }

    /**
     * Resolves the {@link Hessian2FactoryManager} bound to the url's framework model,
     * falling back to the default framework model when the url is null.
     * (Extracted: this lookup chain was duplicated verbatim in serialize/deserialize.)
     */
    private static Hessian2FactoryManager factoryManagerFor(URL url) {
        return Optional.ofNullable(url)
                .map(URL::getOrDefaultFrameworkModel)
                .orElseGet(FrameworkModel::defaultModel)
                .getBeanFactory()
                .getBean(Hessian2FactoryManager.class);
    }
}
| 8,824 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2ObjectInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.serialize.Cleanable;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Type;
import com.alibaba.com.caucho.hessian.io.Hessian2Input;
/**
 * Hessian2 object input implementation. Wraps a {@link Hessian2Input} and keeps its
 * serializer factory aligned with the caller's thread context class loader (TCCL),
 * since {@link Hessian2FactoryManager} caches one factory per class loader.
 */
public class Hessian2ObjectInput implements ObjectInput, Cleanable {
    private final Hessian2Input mH2i;
    private final Hessian2FactoryManager hessian2FactoryManager;

    /**
     * @deprecated use {@link #Hessian2ObjectInput(InputStream, Hessian2FactoryManager)} so the
     * factory manager of the correct framework model is used instead of the default model's.
     */
    @Deprecated
    public Hessian2ObjectInput(InputStream is) {
        this(is, FrameworkModel.defaultModel().getBeanFactory().getOrRegisterBean(Hessian2FactoryManager.class));
    }

    public Hessian2ObjectInput(InputStream is, Hessian2FactoryManager hessian2FactoryManager) {
        this.mH2i = new Hessian2Input(is);
        this.hessian2FactoryManager = hessian2FactoryManager;
        mH2i.setSerializerFactory(hessian2FactoryManager.getSerializerFactory(
                Thread.currentThread().getContextClassLoader()));
    }

    @Override
    public boolean readBool() throws IOException {
        return mH2i.readBoolean();
    }

    @Override
    public byte readByte() throws IOException {
        return (byte) mH2i.readInt();
    }

    @Override
    public short readShort() throws IOException {
        return (short) mH2i.readInt();
    }

    @Override
    public int readInt() throws IOException {
        return mH2i.readInt();
    }

    @Override
    public long readLong() throws IOException {
        return mH2i.readLong();
    }

    @Override
    public float readFloat() throws IOException {
        return (float) mH2i.readDouble();
    }

    @Override
    public double readDouble() throws IOException {
        return mH2i.readDouble();
    }

    @Override
    public byte[] readBytes() throws IOException {
        return mH2i.readBytes();
    }

    @Override
    public String readUTF() throws IOException {
        return mH2i.readString();
    }

    @Override
    public Object readObject() throws IOException {
        syncSerializerFactory();
        return mH2i.readObject();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T readObject(Class<T> cls) throws IOException, ClassNotFoundException {
        syncSerializerFactory();
        return (T) mH2i.readObject(cls);
    }

    @Override
    public <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException {
        // NOTE: the 'type' argument is not used by the hessian2 implementation; the previous
        // version also re-synced the factory here and then again inside readObject(cls) —
        // delegating directly removes that redundant double sync.
        return readObject(cls);
    }

    public InputStream readInputStream() throws IOException {
        return mH2i.readInputStream();
    }

    @Override
    public void cleanup() {
        if (mH2i != null) {
            mH2i.reset();
        }
    }

    /**
     * Re-aligns the serializer factory with the current TCCL if it changed since the last
     * read. (Extracted: this check was copy-pasted in every readObject variant.)
     */
    private void syncSerializerFactory() {
        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
        if (!mH2i.getSerializerFactory().getClassLoader().equals(tccl)) {
            mH2i.setSerializerFactory(hessian2FactoryManager.getSerializerFactory(tccl));
        }
    }
}
| 8,825 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2FactoryManager.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.utils.ConcurrentHashMapUtils;
import org.apache.dubbo.common.utils.DefaultSerializeClassChecker;
import org.apache.dubbo.common.utils.SerializeCheckStatus;
import org.apache.dubbo.common.utils.SerializeSecurityManager;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.util.concurrent.ConcurrentHashMap;
import com.alibaba.com.caucho.hessian.io.SerializerFactory;
/**
 * Manages per-classloader {@link SerializerFactory} instances for hessian2 serialization.
 * Factories are cached per class loader; a one-element "sticky" cache short-circuits the
 * map lookup for repeated calls from the same class loader.
 */
public class Hessian2FactoryManager {
    // Property keys were previously mutable instance fields; they are constants.
    static final String WHITELIST = "dubbo.application.hessian2.whitelist";
    static final String ALLOW = "dubbo.application.hessian2.allow";
    static final String DENY = "dubbo.application.hessian2.deny";
    private static final String ALLOW_NON_SERIALIZABLE = "dubbo.hessian.allowNonSerializable";

    // Lazily created factory for the null (system) class loader; volatile for DCL below.
    private volatile SerializerFactory SYSTEM_SERIALIZER_FACTORY;
    // Last factory handed out; best-effort cache, races are benign (worst case: map lookup).
    private volatile SerializerFactory stickySerializerFactory = null;
    private final ConcurrentHashMap<ClassLoader, SerializerFactory> CL_2_SERIALIZER_FACTORY = new ConcurrentHashMap<>();
    private final SerializeSecurityManager serializeSecurityManager;
    private final DefaultSerializeClassChecker defaultSerializeClassChecker;

    public Hessian2FactoryManager(FrameworkModel frameworkModel) {
        serializeSecurityManager = frameworkModel.getBeanFactory().getOrRegisterBean(SerializeSecurityManager.class);
        defaultSerializeClassChecker =
                frameworkModel.getBeanFactory().getOrRegisterBean(DefaultSerializeClassChecker.class);
    }

    /**
     * Returns the factory for the given class loader, creating and caching it on first use.
     * A null class loader maps to the shared "system" factory.
     */
    public SerializerFactory getSerializerFactory(ClassLoader classLoader) {
        SerializerFactory sticky = stickySerializerFactory;
        if (sticky != null && sticky.getClassLoader().equals(classLoader)) {
            return sticky;
        }
        if (classLoader == null) {
            // system classloader: double-checked locking on the volatile field
            if (SYSTEM_SERIALIZER_FACTORY == null) {
                synchronized (this) {
                    if (SYSTEM_SERIALIZER_FACTORY == null) {
                        SYSTEM_SERIALIZER_FACTORY = createSerializerFactory(null);
                    }
                }
            }
            stickySerializerFactory = SYSTEM_SERIALIZER_FACTORY;
            return SYSTEM_SERIALIZER_FACTORY;
        }
        SerializerFactory factory = ConcurrentHashMapUtils.computeIfAbsent(
                CL_2_SERIALIZER_FACTORY, classLoader, this::createSerializerFactory);
        stickySerializerFactory = factory;
        return factory;
    }

    private SerializerFactory createSerializerFactory(ClassLoader classLoader) {
        String whitelist = System.getProperty(WHITELIST);
        if (StringUtils.isNotEmpty(whitelist)) {
            return createWhiteListSerializerFactory(classLoader);
        }
        return createDefaultSerializerFactory(classLoader);
    }

    private SerializerFactory createDefaultSerializerFactory(ClassLoader classLoader) {
        Hessian2SerializerFactory hessian2SerializerFactory =
                new Hessian2SerializerFactory(classLoader, defaultSerializeClassChecker);
        applyCommonSettings(hessian2SerializerFactory);
        return hessian2SerializerFactory;
    }

    /**
     * Builds a factory driven by the whitelist system properties: whitelist mode registers
     * allow patterns and forces STRICT checking; otherwise deny patterns are registered.
     */
    public SerializerFactory createWhiteListSerializerFactory(ClassLoader classLoader) {
        SerializerFactory serializerFactory = new Hessian2SerializerFactory(classLoader, defaultSerializeClassChecker);
        String whiteList = System.getProperty(WHITELIST);
        if ("true".equals(whiteList)) {
            serializerFactory.getClassFactory().setWhitelist(true);
            String allowPattern = System.getProperty(ALLOW);
            if (StringUtils.isNotEmpty(allowPattern)) {
                for (String pattern : allowPattern.split(";")) {
                    serializerFactory.getClassFactory().allow(pattern);
                    serializeSecurityManager.addToAlwaysAllowed(pattern);
                }
            }
            serializeSecurityManager.setCheckStatus(SerializeCheckStatus.STRICT);
        } else {
            serializerFactory.getClassFactory().setWhitelist(false);
            String denyPattern = System.getProperty(DENY);
            if (StringUtils.isNotEmpty(denyPattern)) {
                for (String pattern : denyPattern.split(";")) {
                    serializerFactory.getClassFactory().deny(pattern);
                    serializeSecurityManager.addToDisAllowed(pattern);
                }
            }
        }
        applyCommonSettings(serializerFactory);
        return serializerFactory;
    }

    /**
     * Settings shared by every factory flavor: the non-serializable policy (from the
     * system property, default false) and dubbo's own allow rule.
     * (Extracted: these two lines were duplicated in both factory builders.)
     */
    private static void applyCommonSettings(SerializerFactory serializerFactory) {
        serializerFactory.setAllowNonSerializable(
                Boolean.parseBoolean(System.getProperty(ALLOW_NON_SERIALIZABLE, "false")));
        serializerFactory.getClassFactory().allow("org.apache.dubbo.*");
    }

    /** Drops the cached factory for a detached class loader (see Hessian2ClassLoaderListener). */
    public void onRemoveClassLoader(ClassLoader classLoader) {
        CL_2_SERIALIZER_FACTORY.remove(classLoader);
    }
}
| 8,826 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2SerializerFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.utils.DefaultSerializeClassChecker;
import java.io.Serializable;
import com.alibaba.com.caucho.hessian.io.Deserializer;
import com.alibaba.com.caucho.hessian.io.JavaDeserializer;
import com.alibaba.com.caucho.hessian.io.JavaSerializer;
import com.alibaba.com.caucho.hessian.io.Serializer;
import com.alibaba.com.caucho.hessian.io.SerializerFactory;
/**
 * Hessian {@link SerializerFactory} that routes class resolution through dubbo's
 * {@link DefaultSerializeClassChecker}, so the centralized allow/deny rules apply
 * before hessian creates (de)serializers.
 */
public class Hessian2SerializerFactory extends SerializerFactory {
    private final DefaultSerializeClassChecker defaultSerializeClassChecker;

    public Hessian2SerializerFactory(
            ClassLoader classLoader, DefaultSerializeClassChecker defaultSerializeClassChecker) {
        super(classLoader);
        this.defaultSerializeClassChecker = defaultSerializeClassChecker;
    }

    /** Resolves a class name during deserialization via dubbo's checker instead of plain loading. */
    @Override
    public Class<?> loadSerializedClass(String className) throws ClassNotFoundException {
        return defaultSerializeClassChecker.loadClass(getClassLoader(), className);
    }

    @Override
    protected Serializer getDefaultSerializer(Class cl) {
        if (_defaultSerializer != null) return _defaultSerializer;
        try {
            // pre-check if class is allow
            // NOTE(review): only ClassNotFoundException is swallowed; presumably a disallowed
            // class makes the checker throw an unchecked exception that is meant to propagate
            // — confirm against DefaultSerializeClassChecker.
            defaultSerializeClassChecker.loadClass(getClassLoader(), cl.getName());
        } catch (ClassNotFoundException e) {
            // ignore
        }
        checkSerializable(cl);
        return new JavaSerializer(cl, getClassLoader());
    }

    @Override
    protected Deserializer getDefaultDeserializer(Class cl) {
        try {
            // pre-check if class is allow (same swallow-CNFE pattern as getDefaultSerializer)
            defaultSerializeClassChecker.loadClass(getClassLoader(), cl.getName());
        } catch (ClassNotFoundException e) {
            // ignore
        }
        checkSerializable(cl);
        return new JavaDeserializer(cl);
    }

    /**
     * Enforces the Serializable contract:
     * If class is Serializable => ok
     * If class has not implement Serializable
     *   If hessian check serializable => fail
     *   If dubbo class checker check serializable => fail
     *   If both hessian and dubbo class checker allow non-serializable => ok
     */
    private void checkSerializable(Class<?> cl) {
        if (!Serializable.class.isAssignableFrom(cl)
                && (!isAllowNonSerializable() || defaultSerializeClassChecker.isCheckSerializable())) {
            throw new IllegalStateException(
                    "Serialized class " + cl.getName() + " must implement java.io.Serializable");
        }
    }
}
| 8,827 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-hessian2/src/main/java/org/apache/dubbo/common/serialize/hessian2/Hessian2ObjectOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.hessian2;
import org.apache.dubbo.common.serialize.Cleanable;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.IOException;
import java.io.OutputStream;
import com.alibaba.com.caucho.hessian.io.Hessian2Output;
/**
 * Hessian2 object output implementation: a thin adapter mapping dubbo's
 * {@link ObjectOutput} contract onto a {@link Hessian2Output} stream.
 */
public class Hessian2ObjectOutput implements ObjectOutput, Cleanable {
    private final Hessian2Output output;

    /** @deprecated prefer the two-argument constructor; this one resolves the default model's manager. */
    @Deprecated
    public Hessian2ObjectOutput(OutputStream os) {
        this.output = new Hessian2Output(os);
        Hessian2FactoryManager manager =
                FrameworkModel.defaultModel().getBeanFactory().getOrRegisterBean(Hessian2FactoryManager.class);
        this.output.setSerializerFactory(
                manager.getSerializerFactory(Thread.currentThread().getContextClassLoader()));
    }

    public Hessian2ObjectOutput(OutputStream os, Hessian2FactoryManager hessian2FactoryManager) {
        this.output = new Hessian2Output(os);
        this.output.setSerializerFactory(hessian2FactoryManager.getSerializerFactory(
                Thread.currentThread().getContextClassLoader()));
    }

    @Override
    public void writeBool(boolean v) throws IOException {
        output.writeBoolean(v);
    }

    @Override
    public void writeByte(byte v) throws IOException {
        // hessian has no dedicated byte type; widened to int on the wire
        output.writeInt(v);
    }

    @Override
    public void writeShort(short v) throws IOException {
        output.writeInt(v);
    }

    @Override
    public void writeInt(int v) throws IOException {
        output.writeInt(v);
    }

    @Override
    public void writeLong(long v) throws IOException {
        output.writeLong(v);
    }

    @Override
    public void writeFloat(float v) throws IOException {
        // hessian has no dedicated float type; widened to double on the wire
        output.writeDouble(v);
    }

    @Override
    public void writeDouble(double v) throws IOException {
        output.writeDouble(v);
    }

    @Override
    public void writeBytes(byte[] b) throws IOException {
        output.writeBytes(b);
    }

    @Override
    public void writeBytes(byte[] b, int off, int len) throws IOException {
        output.writeBytes(b, off, len);
    }

    @Override
    public void writeUTF(String v) throws IOException {
        output.writeString(v);
    }

    @Override
    public void writeObject(Object obj) throws IOException {
        output.writeObject(obj);
    }

    @Override
    public void flushBuffer() throws IOException {
        output.flushBuffer();
    }

    public OutputStream getOutputStream() throws IOException {
        return output.getBytesOutputStream();
    }

    @Override
    public void cleanup() {
        if (output != null) {
            output.reset();
        }
    }
}
| 8,828 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/ObjectInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Map;
/**
 * Object input interface: the read side of dubbo's serialization abstraction.
 */
public interface ObjectInput extends DataInput {
    /**
     * Consider use {@link #readObject(Class)} or {@link #readObject(Class, Type)} where possible
     *
     * @return object
     * @throws IOException if an I/O error occurs
     * @throws ClassNotFoundException if the class of a serialized object cannot be found
     */
    Object readObject() throws IOException, ClassNotFoundException;
    /**
     * Reads an object, using the expected class to guide deserialization.
     *
     * @param cls object class
     * @return object
     * @throws IOException if an I/O error occurs
     * @throws ClassNotFoundException if the class of a serialized object cannot be found
     */
    <T> T readObject(Class<T> cls) throws IOException, ClassNotFoundException;
    /**
     * Reads an object, additionally supplying generic type information; implementations
     * may ignore {@code type} (e.g. hessian2 does).
     *
     * @param cls object class
     * @param type object type
     * @return object
     * @throws IOException if an I/O error occurs
     * @throws ClassNotFoundException if the class of a serialized object cannot be found
     */
    <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException;
    /**
     * The following methods are customized for the requirement of Dubbo's RPC protocol implementation. Legacy protocol
     * implementation will try to write Map, Throwable and Null value directly to the stream, which does not meet the
     * restrictions of all serialization protocols.
     *
     * <p>
     * See how ProtobufSerialization, KryoSerialization implemented these methods for more details.
     * <p>
     * <p>
     * The binding of RPC protocol and biz serialization protocol is not a good practice. Encoding of RPC protocol
     * should be highly independent and portable, easy to cross platforms and languages, for example, like the http headers,
     * restricting the content of headers / attachments to Ascii strings and uses ISO_8859_1 to encode them.
     * https://tools.ietf.org/html/rfc7540#section-8.1.2
     */
    default Throwable readThrowable() throws IOException, ClassNotFoundException {
        Object obj = readObject();
        if (!(obj instanceof Throwable)) {
            throw new IOException("Response data error, expect Throwable, but get " + obj.getClass());
        }
        return (Throwable) obj;
    }
    /** Reads an event payload; by default events are transported as plain strings. */
    default String readEvent() throws IOException, ClassNotFoundException {
        return readUTF();
    }
    /** Reads the RPC attachments map. */
    default Map<String, Object> readAttachments() throws IOException, ClassNotFoundException {
        return readObject(Map.class);
    }
}
| 8,829 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/DefaultSerializationExceptionWrapper.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import org.apache.dubbo.common.URL;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Type;
import java.util.Map;
/**
 * Decorator around a {@link Serialization} implementation whose streams normalize any
 * exception thrown by the delegate into {@link IOException} (via the Proxy* wrappers),
 * so callers only need to handle IO failures.
 */
public class DefaultSerializationExceptionWrapper implements Serialization {
    private final Serialization serialization;

    /**
     * @param serialization the delegate; must not be null
     * @throws IllegalArgumentException if {@code serialization} is null
     */
    public DefaultSerializationExceptionWrapper(Serialization serialization) {
        if (serialization == null) {
            throw new IllegalArgumentException("serialization == null");
        }
        this.serialization = serialization;
    }

    @Override
    public byte getContentTypeId() {
        return serialization.getContentTypeId();
    }

    @Override
    public String getContentType() {
        return serialization.getContentType();
    }

    /** Delegates, then wraps the returned stream in the exception-normalizing proxy. */
    @Override
    public ObjectOutput serialize(URL url, OutputStream output) throws IOException {
        ObjectOutput objectOutput = serialization.serialize(url, output);
        return new ProxyObjectOutput(objectOutput);
    }

    /** Delegates, then wraps the returned stream in the exception-normalizing proxy. */
    @Override
    public ObjectInput deserialize(URL url, InputStream input) throws IOException {
        ObjectInput objectInput = serialization.deserialize(url, input);
        return new ProxyObjectInput(objectInput);
    }
/**
 * {@link ObjectInput} decorator that funnels every exception thrown by the delegate
 * through handleToIOException, so callers observe IOException rather than
 * serializer-specific exception types.
 */
static class ProxyObjectInput implements ObjectInput {
    private final ObjectInput target;

    public ProxyObjectInput(ObjectInput target) {
        this.target = target;
    }

    @Override
    public boolean readBool() throws IOException {
        try {
            return target.readBool();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public byte readByte() throws IOException {
        try {
            return target.readByte();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public short readShort() throws IOException {
        try {
            return target.readShort();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public int readInt() throws IOException {
        try {
            return target.readInt();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public long readLong() throws IOException {
        try {
            return target.readLong();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public float readFloat() throws IOException {
        try {
            return target.readFloat();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public double readDouble() throws IOException {
        try {
            return target.readDouble();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public String readUTF() throws IOException {
        try {
            return target.readUTF();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public byte[] readBytes() throws IOException {
        try {
            return target.readBytes();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public Object readObject() throws IOException, ClassNotFoundException {
        // NOTE(review): unlike readAttachments below, a ClassNotFoundException from the
        // delegate is converted to IOException here even though it is declared — confirm
        // this asymmetry is intended.
        try {
            return target.readObject();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public <T> T readObject(Class<T> cls) throws IOException, ClassNotFoundException {
        try {
            return target.readObject(cls);
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException {
        try {
            return target.readObject(cls, type);
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    // Narrows the default method's throws clause: ClassNotFoundException is absorbed
    // into IOException by the wrapper.
    @Override
    public Throwable readThrowable() throws IOException {
        try {
            return target.readThrowable();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public String readEvent() throws IOException {
        try {
            return target.readEvent();
        } catch (Exception e) {
            throw handleToIOException(e);
        }
    }

    @Override
    public Map<String, Object> readAttachments() throws IOException, ClassNotFoundException {
        try {
            return target.readAttachments();
        } catch (Exception e) {
            // Preserve ClassNotFoundException for attachments (precise rethrow);
            // everything else becomes IOException.
            if (e instanceof ClassNotFoundException) {
                throw e;
            }
            throw handleToIOException(e);
        }
    }
}
static class ProxyObjectOutput implements ObjectOutput {
private final ObjectOutput target;
public ProxyObjectOutput(ObjectOutput target) {
this.target = target;
}
@Override
public void writeBool(boolean v) throws IOException {
try {
target.writeBool(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeByte(byte v) throws IOException {
try {
target.writeByte(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeShort(short v) throws IOException {
try {
target.writeShort(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeInt(int v) throws IOException {
try {
target.writeInt(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeLong(long v) throws IOException {
try {
target.writeLong(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeFloat(float v) throws IOException {
try {
target.writeFloat(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeDouble(double v) throws IOException {
try {
target.writeDouble(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeUTF(String v) throws IOException {
try {
target.writeUTF(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeBytes(byte[] v) throws IOException {
try {
target.writeBytes(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeBytes(byte[] v, int off, int len) throws IOException {
try {
target.writeBytes(v);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void flushBuffer() throws IOException {
try {
target.flushBuffer();
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeObject(Object obj) throws IOException {
try {
target.writeObject(obj);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeThrowable(Throwable obj) throws IOException {
try {
target.writeThrowable(obj);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeEvent(String data) throws IOException {
try {
target.writeEvent(data);
} catch (Exception e) {
throw handleToIOException(e);
}
}
@Override
public void writeAttachments(Map<String, Object> attachments) throws IOException {
try {
target.writeAttachments(attachments);
} catch (Exception e) {
throw handleToIOException(e);
}
}
}
private static IOException handleToIOException(Exception e) {
if (!(e instanceof IOException)) {
return new IOException(new SerializationException(e));
}
return (IOException) e;
}
}
| 8,830 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/Serialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.extension.Adaptive;
import org.apache.dubbo.common.extension.ExtensionScope;
import org.apache.dubbo.common.extension.SPI;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * Serialization strategy interface that specifies a serializer. (SPI, Singleton, ThreadSafe)
 * <p>
 * The default extension is hessian2, which is also the default serialization implementation
 * of the dubbo protocol, e.g.
 * <pre>
 * e.g. &lt;dubbo:protocol serialization="xxx" /&gt;
 * </pre>
 */
@SPI(scope = ExtensionScope.FRAMEWORK)
public interface Serialization {

    /**
     * Gets the unique content-type id. Custom implementations are recommended to use values
     * different from every value in {@link Constants} and not greater than
     * ExchangeCodec.SERIALIZATION_MASK (31), because the dubbo protocol uses 5 bits to record
     * the serialization ID in its header.
     *
     * @return content type id
     */
    byte getContentTypeId();

    /**
     * Gets the content type name of this serialization.
     *
     * @return content type
     */
    String getContentType();

    /**
     * Creates a serializer writing to the given stream.
     *
     * @param url    URL address for the remote service
     * @param output the underlying output stream
     * @return serializer
     * @throws IOException if the serializer cannot be created
     */
    @Adaptive
    ObjectOutput serialize(URL url, OutputStream output) throws IOException;

    /**
     * Creates a deserializer reading from the given stream.
     *
     * @param url   URL address for the remote service
     * @param input the underlying input stream
     * @return deserializer
     * @throws IOException if the deserializer cannot be created
     */
    @Adaptive
    ObjectInput deserialize(URL url, InputStream input) throws IOException;
}
| 8,831 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/ObjectOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import java.io.IOException;
import java.util.Map;
/**
 * Object output interface: extends {@link DataOutput} with the ability to write whole
 * objects plus the RPC-specific payload parts (throwables, events, attachments).
 */
public interface ObjectOutput extends DataOutput {

    /**
     * Writes an object.
     *
     * @param obj object.
     * @throws IOException if writing fails
     */
    void writeObject(Object obj) throws IOException;

    /**
     * The following methods are customized for the requirement of Dubbo's RPC protocol
     * implementation. Legacy protocol implementations try to write Map, Throwable and null
     * values directly to the stream, which does not meet the restrictions of all
     * serialization protocols.
     *
     * <p>
     * See how ProtobufSerialization and KryoSerialization implement these methods for more
     * details.
     * <p>
     *
     * Binding the RPC protocol to the biz serialization protocol is not a good practice.
     * Encoding of an RPC protocol should be highly independent and portable, easy to use
     * across platforms and languages — for example, like HTTP headers, restricting the
     * content of headers / attachments to ASCII strings encoded with ISO_8859_1.
     * https://tools.ietf.org/html/rfc7540#section-8.1.2
     */
    default void writeThrowable(Throwable obj) throws IOException {
        writeObject(obj);
    }

    /** Writes an event payload; by default delegates to {@link #writeObject(Object)}. */
    default void writeEvent(String data) throws IOException {
        writeObject(data);
    }

    /** Writes the attachments map; by default delegates to {@link #writeObject(Object)}. */
    default void writeAttachments(Map<String, Object> attachments) throws IOException {
        writeObject(attachments);
    }
}
| 8,832 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/Cleanable.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
/**
 * Interface defining that an object is cleanable: implementations hold internal state
 * that should be released or reset via {@link #cleanup()}.
 */
public interface Cleanable {

    /**
     * Implementations must implement this cleanup method to release/reset internal state.
     */
    void cleanup();
}
| 8,833 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/DefaultMultipleSerialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import org.apache.dubbo.common.URL;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * Default {@link MultipleSerialization} implementation: resolves the concrete
 * {@link Serialization} extension by name from the framework model carried by the given URL
 * and delegates the actual (de)serialization to it. The legacy name "hessian4" is treated
 * as an alias of "hessian2".
 */
public class DefaultMultipleSerialization implements MultipleSerialization {

    @Override
    public void serialize(URL url, String serializeType, Class<?> clz, Object obj, OutputStream os) throws IOException {
        ObjectOutput out = lookup(url, convertHessian(serializeType)).serialize(null, os);
        out.writeObject(obj);
        out.flushBuffer();
    }

    @Override
    public Object deserialize(URL url, String serializeType, Class<?> clz, InputStream os)
            throws IOException, ClassNotFoundException {
        ObjectInput in = lookup(url, convertHessian(serializeType)).deserialize(null, os);
        return in.readObject(clz);
    }

    /** Resolves the named Serialization extension via the URL's framework model. */
    private static Serialization lookup(URL url, String serializeType) {
        return url.getOrDefaultFrameworkModel()
                .getExtensionLoader(Serialization.class)
                .getExtension(serializeType);
    }

    /** Maps the legacy "hessian4" name onto the supported "hessian2" extension. */
    private String convertHessian(String ser) {
        return ser.equals("hessian4") ? "hessian2" : ser;
    }
}
| 8,834 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/DataOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import java.io.IOException;
/**
 * Basic data type output interface: the primitive-level half of a serializer, writing
 * Java primitives, strings and byte arrays to an underlying stream.
 */
public interface DataOutput {

    /**
     * Write boolean.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeBool(boolean v) throws IOException;

    /**
     * Write byte.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeByte(byte v) throws IOException;

    /**
     * Write short.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeShort(short v) throws IOException;

    /**
     * Write integer.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeInt(int v) throws IOException;

    /**
     * Write long.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeLong(long v) throws IOException;

    /**
     * Write float.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeFloat(float v) throws IOException;

    /**
     * Write double.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeDouble(double v) throws IOException;

    /**
     * Write string.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeUTF(String v) throws IOException;

    /**
     * Write an entire byte array.
     *
     * @param v value.
     * @throws IOException if an I/O error occurs
     */
    void writeBytes(byte[] v) throws IOException;

    /**
     * Write a portion of a byte array.
     *
     * @param v value.
     * @param off the start offset in the data.
     * @param len the number of bytes that are written.
     * @throws IOException if an I/O error occurs
     */
    void writeBytes(byte[] v, int off, int len) throws IOException;

    /**
     * Flush any buffered bytes to the underlying stream.
     *
     * @throws IOException if an I/O error occurs
     */
    void flushBuffer() throws IOException;
}
| 8,835 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/SerializationException.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
/**
 * Serialization exception used for internal flow: it is converted into a general exception
 * and tagged as a serialization failure when returning to the RPC layer.
 */
public class SerializationException extends Exception {

    private static final long serialVersionUID = -3160452149606778709L;

    /**
     * @param msg detail message describing the serialization failure
     */
    public SerializationException(String msg) {
        super(msg);
    }

    /**
     * @param cause the underlying failure
     */
    public SerializationException(Throwable cause) {
        super(cause);
    }

    /**
     * Convenience constructor carrying both a message and the underlying cause
     * (backward-compatible addition, matching the standard Exception convention).
     *
     * @param msg   detail message describing the serialization failure
     * @param cause the underlying failure
     */
    public SerializationException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
| 8,836 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/DataInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import java.io.IOException;
/**
 * Basic data type input interface: the primitive-level half of a deserializer, reading
 * Java primitives, strings and byte arrays from an underlying stream.
 */
public interface DataInput {

    /**
     * Read boolean.
     *
     * @return boolean.
     * @throws IOException if an I/O or decoding error occurs
     */
    boolean readBool() throws IOException;

    /**
     * Read byte.
     *
     * @return byte value.
     * @throws IOException if an I/O or decoding error occurs
     */
    byte readByte() throws IOException;

    /**
     * Read short integer.
     *
     * @return short.
     * @throws IOException if an I/O or decoding error occurs
     */
    short readShort() throws IOException;

    /**
     * Read integer.
     *
     * @return integer.
     * @throws IOException if an I/O or decoding error occurs
     */
    int readInt() throws IOException;

    /**
     * Read long.
     *
     * @return long.
     * @throws IOException if an I/O or decoding error occurs
     */
    long readLong() throws IOException;

    /**
     * Read float.
     *
     * @return float.
     * @throws IOException if an I/O or decoding error occurs
     */
    float readFloat() throws IOException;

    /**
     * Read double.
     *
     * @return double.
     * @throws IOException if an I/O or decoding error occurs
     */
    double readDouble() throws IOException;

    /**
     * Read UTF-8 string.
     *
     * @return string.
     * @throws IOException if an I/O or decoding error occurs
     */
    String readUTF() throws IOException;

    /**
     * Read byte array.
     *
     * @return byte array.
     * @throws IOException if an I/O or decoding error occurs
     */
    byte[] readBytes() throws IOException;
}
| 8,837 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/MultipleSerialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.extension.ExtensionScope;
import org.apache.dubbo.common.extension.SPI;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * SPI for serializing with a serialization extension chosen per call by name rather than
 * fixed per protocol (see {@code DefaultMultipleSerialization} for the default implementation).
 */
@SPI(scope = ExtensionScope.FRAMEWORK)
public interface MultipleSerialization {

    /**
     * Serializes {@code obj} to the given output stream.
     *
     * @param url           URL; in the default implementation its framework model is used to
     *                      look up the extension
     * @param serializeType name of the serialization extension to use
     * @param clz           declared type of {@code obj}
     * @param obj           the object to write
     * @param os            destination stream
     * @throws IOException if writing fails
     */
    void serialize(URL url, String serializeType, Class<?> clz, Object obj, OutputStream os) throws IOException;

    /**
     * Deserializes one object from the given input stream.
     *
     * @param url           URL; in the default implementation its framework model is used to
     *                      look up the extension
     * @param serializeType name of the serialization extension to use
     * @param clz           expected type of the decoded object
     * @param os            source stream
     * @return the decoded object
     * @throws IOException            if reading fails
     * @throws ClassNotFoundException if the decoded class cannot be loaded
     */
    Object deserialize(URL url, String serializeType, Class<?> clz, InputStream os)
            throws IOException, ClassNotFoundException;
}
| 8,838 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/Constants.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize;
/**
 * Well-known serialization content-type ids. Per {@link Serialization#getContentTypeId()},
 * the id is carried in 5 bits of the dubbo protocol header, so values must not exceed 31,
 * and custom implementations must avoid colliding with the ids listed here.
 */
public interface Constants {
    byte HESSIAN2_SERIALIZATION_ID = 2;
    byte JAVA_SERIALIZATION_ID = 3;
    byte COMPACTED_JAVA_SERIALIZATION_ID = 4;
    byte FASTJSON_SERIALIZATION_ID = 6;
    byte NATIVE_JAVA_SERIALIZATION_ID = 7;
    byte KRYO_SERIALIZATION_ID = 8;
    byte FST_SERIALIZATION_ID = 9;
    byte NATIVE_HESSIAN_SERIALIZATION_ID = 10;
    byte PROTOSTUFF_SERIALIZATION_ID = 12;
    byte AVRO_SERIALIZATION_ID = 11;
    byte GSON_SERIALIZATION_ID = 16;
    byte PROTOBUF_JSON_SERIALIZATION_ID = 21;
    byte PROTOBUF_SERIALIZATION_ID = 22;
    byte FASTJSON2_SERIALIZATION_ID = 23;
    byte KRYO_SERIALIZATION2_ID = 25;
    byte CUSTOM_MESSAGE_PACK_ID = 31;
}
| 8,839 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/support/SerializableClassRegistry.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.support;
import java.util.HashMap;
import java.util.Map;
/**
 * Unified registry of classes that serialization extensions (e.g. {@code dubbo-serialization-fst}
 * and {@code dubbo-serialization-kryo}) pre-register, optionally together with a custom
 * serializer instance. Intended to be populated only at startup time; the backing map is
 * not synchronized.
 */
public abstract class SerializableClassRegistry {

    /** clazz -> optional serializer instance (null when none was supplied). */
    private static final Map<Class<?>, Object> registrations = new HashMap<>();

    /**
     * Registers a class without a dedicated serializer; only supposed to be called at
     * startup time.
     *
     * @param clazz object type
     */
    public static void registerClass(Class<?> clazz) {
        registerClass(clazz, null);
    }

    /**
     * Registers a class with an optional serializer; only supposed to be called at
     * startup time.
     *
     * @param clazz      object type, must not be null
     * @param serializer object serializer, may be null
     * @throws IllegalArgumentException if {@code clazz} is null
     */
    public static void registerClass(Class<?> clazz, Object serializer) {
        if (clazz == null) {
            throw new IllegalArgumentException("Class registered to kryo cannot be null!");
        }
        registrations.put(clazz, serializer);
    }

    /**
     * Returns the live registration map (class -> serializer or null).
     *
     * @return class serializer
     */
    public static Map<Class<?>, Object> getRegisteredClasses() {
        return registrations;
    }
}
| 8,840 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/support/DefaultSerializationSelector.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.support;
/**
 * Resolves the default remoting serialization name once at class-load time.
 * Resolution order: the {@code DUBBO_DEFAULT_SERIALIZATION} JVM system property, then the
 * environment variable of the same name, then the built-in default {@code hessian2}.
 */
public class DefaultSerializationSelector {

    private static final String DEFAULT_REMOTING_SERIALIZATION_PROPERTY_KEY = "DUBBO_DEFAULT_SERIALIZATION";

    private static final String DEFAULT_REMOTING_SERIALIZATION_PROPERTY = "hessian2";

    /** Immutable result of the property/env/default lookup, computed once. */
    private static final String DEFAULT_REMOTING_SERIALIZATION;

    static {
        String fromProperty = System.getProperty(DEFAULT_REMOTING_SERIALIZATION_PROPERTY_KEY);
        if (fromProperty != null) {
            DEFAULT_REMOTING_SERIALIZATION = fromProperty;
        } else {
            String fromEnv = System.getenv(DEFAULT_REMOTING_SERIALIZATION_PROPERTY_KEY);
            if (fromEnv != null) {
                DEFAULT_REMOTING_SERIALIZATION = fromEnv;
            } else {
                DEFAULT_REMOTING_SERIALIZATION = DEFAULT_REMOTING_SERIALIZATION_PROPERTY;
            }
        }
    }

    /** Static utility holder — not meant to be instantiated. */
    private DefaultSerializationSelector() {}

    /**
     * @return the serialization extension name to use by default for remoting
     */
    public static String getDefaultRemotingSerialization() {
        return DEFAULT_REMOTING_SERIALIZATION;
    }
}
| 8,841 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/support/SerializationOptimizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.support;
import java.util.Collection;
/**
* Interface defining serialization optimizer, there are nothing implementations for now.
*/
public interface SerializationOptimizer {
/**
* Get serializable classes
*
* @return serializable classes
* */
Collection<Class<?>> getSerializableClasses();
}
| 8,842 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize/fastjson2/TrustedPojo.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import java.io.Serializable;
import java.util.Objects;
public class TrustedPojo implements Serializable {
private final double data;
public TrustedPojo(double data) {
this.data = data;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TrustedPojo that = (TrustedPojo) o;
return Objects.equals(data, that.data);
}
@Override
public int hashCode() {
return Objects.hash(data);
}
}
| 8,843 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize/fastjson2/FastJson2SerializationTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.common.utils.SerializeCheckStatus;
import org.apache.dubbo.common.utils.SerializeSecurityManager;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import com.example.test.TestPojo;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * End-to-end tests for the "fastjson2" {@code Serialization} extension.
 *
 * <p>Covers the typed read methods ({@code readUTF}, {@code readEvent}, {@code readByte},
 * {@code readObject}) against matching and mismatching payload types, and the
 * serialize-security allow-list / STRICT-mode enforcement. Each sub-case serializes a
 * value into a byte array and deserializes it through a fresh {@code ObjectInput}.
 */
public class FastJson2SerializationTest {
    /**
     * {@code readUTF} returns the written string (or null), rejects map payloads with
     * {@code IOException}, and — per the assertions below — returns a String for
     * pojo/list payloads rather than failing.
     */
    @Test
    void testReadString() throws IOException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write string, read the same string back
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject("hello");
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals("hello", objectInput.readUTF());
        }
        // write null, read null
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(null);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertNull(objectInput.readUTF());
        }
        // write map, readUTF is rejected with IOException
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new HashMap<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readUTF);
        }
        // write pojo, readUTF yields a String (the serialized text) rather than failing
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertInstanceOf(String.class, objectInput.readUTF());
        }
        // write map, readUTF is rejected with IOException
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new HashMap<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readUTF);
        }
        // write list, readUTF yields a String rather than failing
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new LinkedList<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertInstanceOf(String.class, objectInput.readUTF());
        }
        frameworkModel.destroy();
    }

    /**
     * {@code readEvent} mirrors {@code readUTF}: string round-trips, maps are rejected,
     * pojo/list payloads come back as a String.
     */
    @Test
    void testReadEvent() throws IOException, ClassNotFoundException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write string, read event returns the same string
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject("hello");
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals("hello", objectInput.readEvent());
        }
        // write map, readEvent is rejected with IOException
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new HashMap<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readEvent);
        }
        // write pojo, readEvent yields a String rather than failing
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertInstanceOf(String.class, objectInput.readEvent());
        }
        // write map, readEvent is rejected with IOException
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new HashMap<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readEvent);
        }
        // write list, readEvent yields a String rather than failing
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new LinkedList<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertInstanceOf(String.class, objectInput.readEvent());
        }
        frameworkModel.destroy();
    }

    /**
     * {@code readByte} only accepts a byte payload; every non-byte payload
     * (date, pojo, map, list) is rejected with {@code IOException}.
     */
    @Test
    void testReadByte() throws IOException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write byte, read the same byte back
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject((byte) 11);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals((byte) 11, objectInput.readByte());
        }
        // write date, readByte is rejected
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new Date());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readByte);
        }
        // write pojo, readByte is rejected
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new TrustedPojo(ThreadLocalRandom.current().nextDouble()));
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readByte);
        }
        // write map, readByte is rejected
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new HashMap<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readByte);
        }
        // write list, readByte is rejected
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(new LinkedList<>());
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readByte);
        }
        frameworkModel.destroy();
    }

    /**
     * Happy-path round-trips for {@code readObject}: untyped reads, and typed reads with the
     * concrete class, a supertype ({@code List}), and the exact collection class.
     */
    @Test
    void testReadObject() throws IOException, ClassNotFoundException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write pojo, untyped readObject() returns an equal pojo
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals(trustedPojo, objectInput.readObject());
        }
        // write list, untyped readObject() returns an equal list
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            LinkedList<TrustedPojo> pojos = new LinkedList<>();
            pojos.add(trustedPojo);
            objectOutput.writeObject(pojos);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals(pojos, objectInput.readObject());
        }
        // write pojo, typed readObject(TrustedPojo.class) returns an equal pojo
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals(trustedPojo, objectInput.readObject(TrustedPojo.class));
        }
        // write list, typed readObject(List.class) returns an equal list
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            LinkedList<TrustedPojo> pojos = new LinkedList<>();
            pojos.add(trustedPojo);
            objectOutput.writeObject(pojos);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals(pojos, objectInput.readObject(List.class));
        }
        // write list, typed readObject(LinkedList.class) returns an equal list
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            LinkedList<TrustedPojo> pojos = new LinkedList<>();
            pojos.add(trustedPojo);
            objectOutput.writeObject(pojos);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertEquals(pojos, objectInput.readObject(LinkedList.class));
        }
        frameworkModel.destroy();
    }

    /**
     * Under STRICT security checking, typed {@code readObject} rejects a payload whose
     * actual type does not match the requested type — except a String target, which
     * succeeds (the payload is surfaced as its textual form).
     */
    @Test
    void testReadObjectNotMatched() throws IOException, ClassNotFoundException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        frameworkModel
                .getBeanFactory()
                .getBean(SerializeSecurityManager.class)
                .setCheckStatus(SerializeCheckStatus.STRICT);
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write pojo, read as List fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(List.class));
        }
        // write pojo, read as LinkedList fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(LinkedList.class));
        }
        // write pojo, read as String succeeds (returns the textual form) rather than failing
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertInstanceOf(String.class, objectInput.readObject(String.class));
        }
        // write pojo, read as an unrelated (non-Serializable) type fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(TrustedNotSerializable.class));
        }
        // write pojo, read as a different class with the same field shape still fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(TrustedPojo2.class));
        }
        // write pojo, read as Map fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(Map.class));
        }
        // write list, read as pojo fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            LinkedList<TrustedPojo> pojos = new LinkedList<>();
            pojos.add(trustedPojo);
            objectOutput.writeObject(pojos);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(TrustedPojo.class));
        }
        // write list, read as Map fails
        {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            TrustedPojo trustedPojo =
                    new TrustedPojo(ThreadLocalRandom.current().nextDouble());
            LinkedList<TrustedPojo> pojos = new LinkedList<>();
            pojos.add(trustedPojo);
            objectOutput.writeObject(pojos);
            objectOutput.flushBuffer();
            byte[] bytes = outputStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, () -> objectInput.readObject(Map.class));
        }
        frameworkModel.destroy();
    }

    /** Baseline: a trusted pojo round-trips under the default security settings. */
    @Test
    void testLimit1() throws IOException, ClassNotFoundException {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        // write trusted, read trusted
        TrustedPojo trustedPojo = new TrustedPojo(ThreadLocalRandom.current().nextDouble());
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        ObjectOutput objectOutput = serialization.serialize(url, outputStream);
        objectOutput.writeObject(trustedPojo);
        objectOutput.flushBuffer();
        byte[] bytes = outputStream.toByteArray();
        ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
        ObjectInput objectInput = serialization.deserialize(url, inputStream);
        Assertions.assertEquals(trustedPojo, objectInput.readObject());
        frameworkModel.destroy();
    }

    /**
     * The writer-side allow-list does not carry over: a class explicitly allowed when
     * serializing is still rejected by a fresh reader running in STRICT mode.
     */
    @Test
    void testLimit4() throws IOException, ClassNotFoundException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        // serialize a pojo after force-adding it to the writer model's allow-list
        {
            FrameworkModel frameworkModel = new FrameworkModel();
            Serialization serialization =
                    frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
            URL url = URL.valueOf("").setScopeModel(frameworkModel);
            TestPojo trustedPojo = new TestPojo("12345");
            frameworkModel
                    .getBeanFactory()
                    .getBean(SerializeSecurityManager.class)
                    .addToAllowed(trustedPojo.getClass().getName());
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            frameworkModel.destroy();
        }
        // deserialize in a fresh model under STRICT checking (no allow-list entry) — rejected
        {
            FrameworkModel frameworkModel = new FrameworkModel();
            Serialization serialization =
                    frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
            URL url = URL.valueOf("").setScopeModel(frameworkModel);
            byte[] bytes = outputStream.toByteArray();
            frameworkModel
                    .getBeanFactory()
                    .getBean(SerializeSecurityManager.class)
                    .setCheckStatus(SerializeCheckStatus.STRICT);
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readObject);
            frameworkModel.destroy();
        }
    }

    /**
     * Disabling the Serializable check on the writer side lets a non-Serializable pojo be
     * written, but a STRICT reader still refuses to deserialize it.
     */
    @Test
    void testLimit5() throws IOException, ClassNotFoundException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        // serialize a non-Serializable pojo with the Serializable check disabled
        {
            FrameworkModel frameworkModel = new FrameworkModel();
            Serialization serialization =
                    frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
            URL url = URL.valueOf("").setScopeModel(frameworkModel);
            TrustedNotSerializable trustedPojo =
                    new TrustedNotSerializable(ThreadLocalRandom.current().nextDouble());
            frameworkModel
                    .getBeanFactory()
                    .getBean(SerializeSecurityManager.class)
                    .setCheckSerializable(false);
            ObjectOutput objectOutput = serialization.serialize(url, outputStream);
            objectOutput.writeObject(trustedPojo);
            objectOutput.flushBuffer();
            frameworkModel.destroy();
        }
        // deserialize in a fresh model under STRICT checking — rejected
        {
            FrameworkModel frameworkModel = new FrameworkModel();
            Serialization serialization =
                    frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
            URL url = URL.valueOf("").setScopeModel(frameworkModel);
            byte[] bytes = outputStream.toByteArray();
            frameworkModel
                    .getBeanFactory()
                    .getBean(SerializeSecurityManager.class)
                    .setCheckStatus(SerializeCheckStatus.STRICT);
            ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
            ObjectInput objectInput = serialization.deserialize(url, inputStream);
            Assertions.assertThrows(IOException.class, objectInput::readObject);
            frameworkModel.destroy();
        }
    }
}
| 8,844 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize/fastjson2/TypeMatchTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.DataInput;
import org.apache.dubbo.common.serialize.DataOutput;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
/**
 * Cross-product compatibility test for the fastjson2 serialization streams.
 *
 * <p>For every sample value, every typed {@code write*} method that can accept it, and every
 * no-arg typed {@code read*} method, this test serializes via the writer and deserializes
 * via the reader. When the writer's parameter type equals the reader's return type, the
 * value must round-trip exactly; otherwise the read must either fail or yield a different
 * value.
 */
class TypeMatchTest {
    static class DataProvider implements ArgumentsProvider {
        /**
         * Builds (data, readMethod, writeMethod) triples: sample primitives/strings/byte
         * arrays crossed with every reflectively discovered typed read/write method pair
         * whose write parameter can accept the sample.
         */
        @Override
        public Stream<? extends Arguments> provideArguments(ExtensionContext extensionContext) throws Exception {
            List<Object> datas = new LinkedList<>();
            List<Method> readMethods = new LinkedList<>();
            List<Method> writeMethods = new LinkedList<>();
            // Two distinct samples per type, so a mismatched read cannot pass by accident.
            datas.add(true);
            datas.add(false);
            datas.add((byte) 123);
            datas.add((byte) 234);
            datas.add((short) 12345);
            datas.add((short) 23456);
            datas.add(123456);
            datas.add(234567);
            datas.add(1234567L);
            datas.add(2345678L);
            datas.add(0.123F);
            datas.add(1.234F);
            datas.add(0.1234D);
            datas.add(1.2345D);
            datas.add("hello");
            datas.add("world");
            datas.add("hello".getBytes());
            datas.add("world".getBytes());
            // Collect every no-arg typed read method (skip the untyped Object-returning ones).
            for (Method method : ObjectInput.class.getMethods()) {
                if (method.getName().startsWith("read")
                        && method.getParameterTypes().length == 0
                        && !method.getReturnType().equals(Object.class)) {
                    readMethods.add(method);
                }
            }
            for (Method method : DataInput.class.getMethods()) {
                if (method.getName().startsWith("read")
                        && method.getParameterTypes().length == 0
                        && !method.getReturnType().equals(Object.class)) {
                    readMethods.add(method);
                }
            }
            // Collect every single-arg typed write method (skip writeObject(Object)).
            for (Method method : ObjectOutput.class.getMethods()) {
                if (method.getName().startsWith("write")
                        && method.getParameterTypes().length == 1
                        && !method.getParameterTypes()[0].equals(Object.class)) {
                    writeMethods.add(method);
                }
            }
            for (Method method : DataOutput.class.getMethods()) {
                if (method.getName().startsWith("write")
                        && method.getParameterTypes().length == 1
                        && !method.getParameterTypes()[0].equals(Object.class)) {
                    writeMethods.add(method);
                }
            }
            // Wrapper -> primitive mapping: sample values are boxed, but many write methods
            // declare primitive parameters, which isAssignableFrom would otherwise miss.
            Map<Class<?>, Class<?>> primitiveWrapperTypeMap = new HashMap<>(16);
            primitiveWrapperTypeMap.put(Boolean.class, boolean.class);
            primitiveWrapperTypeMap.put(Byte.class, byte.class);
            primitiveWrapperTypeMap.put(Character.class, char.class);
            primitiveWrapperTypeMap.put(Double.class, double.class);
            primitiveWrapperTypeMap.put(Float.class, float.class);
            primitiveWrapperTypeMap.put(Integer.class, int.class);
            primitiveWrapperTypeMap.put(Long.class, long.class);
            primitiveWrapperTypeMap.put(Short.class, short.class);
            primitiveWrapperTypeMap.put(Void.class, void.class);
            List<Arguments> argumentsList = new LinkedList<>();
            for (Object data : datas) {
                for (Method input : readMethods) {
                    for (Method output : writeMethods) {
                        // Pair the sample with writers that accept its boxed type ...
                        if (output.getParameterTypes()[0].isAssignableFrom(data.getClass())) {
                            argumentsList.add(Arguments.arguments(data, input, output));
                        }
                        // ... or its unboxed primitive type.
                        if (primitiveWrapperTypeMap.containsKey(data.getClass())
                                && output.getParameterTypes()[0].isAssignableFrom(
                                        primitiveWrapperTypeMap.get(data.getClass()))) {
                            argumentsList.add(Arguments.arguments(data, input, output));
                        }
                    }
                }
            }
            return argumentsList.stream();
        }
    }

    /**
     * Writes {@code data} with {@code output}, then reads with {@code input}.
     * Matching types must round-trip exactly; mismatched types must not reproduce the value.
     */
    @ParameterizedTest
    @ArgumentsSource(DataProvider.class)
    void test(Object data, Method input, Method output) throws Exception {
        FrameworkModel frameworkModel = new FrameworkModel();
        Serialization serialization =
                frameworkModel.getExtensionLoader(Serialization.class).getExtension("fastjson2");
        URL url = URL.valueOf("").setScopeModel(frameworkModel);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        ObjectOutput objectOutput = serialization.serialize(url, outputStream);
        output.invoke(objectOutput, data);
        objectOutput.flushBuffer();
        byte[] bytes = outputStream.toByteArray();
        ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
        ObjectInput objectInput = serialization.deserialize(url, inputStream);
        if (output.getParameterTypes()[0].equals(input.getReturnType())) {
            // Matched pair: the read must return exactly what was written.
            Object result = input.invoke(objectInput);
            if (data.getClass().isArray()) {
                // byte[] samples need element-wise comparison, not reference equality.
                Assertions.assertArrayEquals((byte[]) data, (byte[]) result);
            } else {
                Assertions.assertEquals(data, result);
            }
        } else {
            // Mismatched pair: the read is allowed to throw; if it succeeds, the
            // result must not be mistaken for the original value.
            try {
                Object result = input.invoke(objectInput);
                if (data.getClass().isArray()) {
                    Assertions.assertNotEquals(data.getClass(), result.getClass());
                } else {
                    Assertions.assertNotEquals(data, result);
                }
            } catch (Exception e) {
                // ignore — a type-mismatched read failing is an acceptable outcome
            }
        }
        frameworkModel.destroy();
    }
}
| 8,845 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize/fastjson2/TrustedNotSerializable.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import java.util.Objects;
public class TrustedNotSerializable {
private final double data;
public TrustedNotSerializable(double data) {
this.data = data;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TrustedNotSerializable that = (TrustedNotSerializable) o;
return Objects.equals(data, that.data);
}
@Override
public int hashCode() {
return Objects.hash(data);
}
}
| 8,846 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/org/apache/dubbo/common/serialize/fastjson2/TrustedPojo2.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import java.io.Serializable;
import java.util.Objects;
public class TrustedPojo2 implements Serializable {
private final double data;
public TrustedPojo2(double data) {
this.data = data;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TrustedPojo2 that = (TrustedPojo2) o;
return Objects.equals(data, that.data);
}
@Override
public int hashCode() {
return Objects.hash(data);
}
}
| 8,847 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/com/example
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/test/java/com/example/test/TestPojo.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.test;
import java.io.Serializable;
import java.util.Objects;
public class TestPojo implements Serializable {
private final String data;
public TestPojo(String data) {
this.data = data;
}
@Override
public String toString() {
throw new IllegalAccessError();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TestPojo testPojo = (TestPojo) o;
return Objects.equals(data, testPojo.data);
}
@Override
public int hashCode() {
return Objects.hash(data);
}
}
| 8,848 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/Fastjson2CreatorManager.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.rpc.model.FrameworkModel;
import org.apache.dubbo.rpc.model.ScopeClassLoaderListener;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.alibaba.fastjson2.JSONFactory;
import com.alibaba.fastjson2.reader.ObjectReaderCreatorASM;
import com.alibaba.fastjson2.writer.ObjectWriterCreatorASM;
/**
 * Caches fastjson2 ASM reader/writer creators per classLoader and installs
 * them into the fastjson2 context on demand. Cached entries are evicted when
 * the framework model drops a classLoader.
 */
public class Fastjson2CreatorManager implements ScopeClassLoaderListener<FrameworkModel> {

    /**
     * An empty classLoader used when classLoader is system classLoader (null).
     * Prevents the NPE from ConcurrentHashMap's null-hostile keys.
     */
    private static final ClassLoader SYSTEM_CLASSLOADER_KEY = new ClassLoader() {};

    // One ASM creator per classLoader so generated classes are reused.
    private final Map<ClassLoader, ObjectReaderCreatorASM> readerMap = new ConcurrentHashMap<>();
    private final Map<ClassLoader, ObjectWriterCreatorASM> writerMap = new ConcurrentHashMap<>();

    public Fastjson2CreatorManager(FrameworkModel frameworkModel) {
        frameworkModel.addClassLoaderListener(this);
    }

    /**
     * Installs the reader/writer creators bound to {@code classLoader} into the
     * fastjson2 context, creating and caching them on first use.
     *
     * @param classLoader target loader; {@code null} maps to a shared sentinel
     */
    public void setCreator(ClassLoader classLoader) {
        ClassLoader key = (classLoader == null) ? SYSTEM_CLASSLOADER_KEY : classLoader;
        JSONFactory.setContextReaderCreator(readerMap.computeIfAbsent(key, ObjectReaderCreatorASM::new));
        JSONFactory.setContextWriterCreator(writerMap.computeIfAbsent(key, ObjectWriterCreatorASM::new));
    }

    @Override
    public void onAddClassLoader(FrameworkModel scopeModel, ClassLoader classLoader) {
        // Nothing to pre-build; creators are made lazily in setCreator.
    }

    @Override
    public void onRemoveClassLoader(FrameworkModel scopeModel, ClassLoader classLoader) {
        // Drop cached creators so the departing classLoader can be collected.
        readerMap.remove(classLoader);
        writerMap.remove(classLoader);
    }
}
| 8,849 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/FastJson2Serialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Optional;
import static org.apache.dubbo.common.serialize.Constants.FASTJSON2_SERIALIZATION_ID;
/**
* FastJson serialization implementation
*
* <pre>
* e.g. <dubbo:protocol serialization="fastjson" />
* </pre>
*/
/**
 * FastJson serialization implementation
 *
 * <pre>
 * e.g. &lt;dubbo:protocol serialization="fastjson" /&gt;
 * </pre>
 */
public class FastJson2Serialization implements Serialization {

    @Override
    public byte getContentTypeId() {
        return FASTJSON2_SERIALIZATION_ID;
    }

    @Override
    public String getContentType() {
        return "text/jsonb";
    }

    /**
     * Creates a JSONB object output backed by the framework-scoped creator and
     * security managers resolved from {@code url} (or the default framework
     * model when {@code url} is null).
     */
    @Override
    public ObjectOutput serialize(URL url, OutputStream output) throws IOException {
        return new FastJson2ObjectOutput(
                getFrameworkBean(url, Fastjson2CreatorManager.class),
                getFrameworkBean(url, Fastjson2SecurityManager.class),
                output);
    }

    /**
     * Creates a JSONB object input backed by the framework-scoped creator and
     * security managers resolved from {@code url} (or the default framework
     * model when {@code url} is null).
     */
    @Override
    public ObjectInput deserialize(URL url, InputStream input) throws IOException {
        return new FastJson2ObjectInput(
                getFrameworkBean(url, Fastjson2CreatorManager.class),
                getFrameworkBean(url, Fastjson2SecurityManager.class),
                input);
    }

    /**
     * Looks up a framework-scoped bean, falling back to the default framework
     * model when no URL is available. Extracted to remove the four duplicated
     * lookup chains in serialize/deserialize.
     */
    private static <T> T getFrameworkBean(URL url, Class<T> beanClass) {
        return Optional.ofNullable(url)
                .map(URL::getOrDefaultFrameworkModel)
                .orElseGet(FrameworkModel::defaultModel)
                .getBeanFactory()
                .getBean(beanClass);
    }
}
| 8,850 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/FastJson2ObjectOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.serialize.ObjectOutput;
import java.io.IOException;
import java.io.OutputStream;
import com.alibaba.fastjson2.JSONB;
import com.alibaba.fastjson2.JSONWriter;
/**
* FastJson object output implementation
*/
/**
 * FastJson object output implementation.
 *
 * <p>Each value is written as a 4-byte big-endian length prefix followed by
 * the JSONB payload (see {@link #writeLength(int)}).
 */
public class FastJson2ObjectOutput implements ObjectOutput {

    /** Writer features used on every write, regardless of serializable checking. */
    private static final JSONWriter.Feature[] DEFAULT_FEATURES = {
        JSONWriter.Feature.WriteClassName,
        JSONWriter.Feature.FieldBased,
        JSONWriter.Feature.ReferenceDetection,
        JSONWriter.Feature.WriteNulls,
        JSONWriter.Feature.NotWriteDefaultValue,
        JSONWriter.Feature.NotWriteHashMapArrayListClassName,
        JSONWriter.Feature.WriteNameAsSymbol
    };

    /** DEFAULT_FEATURES plus rejection of types that are not Serializable. */
    private static final JSONWriter.Feature[] CHECK_SERIALIZABLE_FEATURES = {
        JSONWriter.Feature.WriteClassName,
        JSONWriter.Feature.FieldBased,
        JSONWriter.Feature.ErrorOnNoneSerializable,
        JSONWriter.Feature.ReferenceDetection,
        JSONWriter.Feature.WriteNulls,
        JSONWriter.Feature.NotWriteDefaultValue,
        JSONWriter.Feature.NotWriteHashMapArrayListClassName,
        JSONWriter.Feature.WriteNameAsSymbol
    };

    private final Fastjson2CreatorManager fastjson2CreatorManager;
    private final Fastjson2SecurityManager fastjson2SecurityManager;

    // ClassLoader the fastjson2 creators are currently bound to; refreshed
    // lazily in writeObject when the thread-context classLoader changes.
    private volatile ClassLoader classLoader;

    private final OutputStream os;

    public FastJson2ObjectOutput(
            Fastjson2CreatorManager fastjson2CreatorManager,
            Fastjson2SecurityManager fastjson2SecurityManager,
            OutputStream out) {
        this.fastjson2CreatorManager = fastjson2CreatorManager;
        this.fastjson2SecurityManager = fastjson2SecurityManager;
        this.classLoader = Thread.currentThread().getContextClassLoader();
        this.os = out;
        fastjson2CreatorManager.setCreator(classLoader);
    }

    @Override
    public void writeBool(boolean v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeByte(byte v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeShort(short v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeInt(int v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeLong(long v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeFloat(float v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeDouble(double v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeUTF(String v) throws IOException {
        writeObject(v);
    }

    @Override
    public void writeBytes(byte[] b) throws IOException {
        // NOTE(review): OutputStream.write(int) emits only the low 8 bits, so the
        // length prefix silently truncates for arrays longer than 255 bytes.
        // Kept as-is for wire compatibility with FastJson2ObjectInput.readBytes().
        os.write(b.length);
        os.write(b);
    }

    @Override
    public void writeBytes(byte[] b, int off, int len) throws IOException {
        // Same single-byte length prefix caveat as writeBytes(byte[]).
        os.write(len);
        os.write(b, off, len);
    }

    /**
     * Serializes {@code obj} to JSONB and writes it as a length-prefixed frame.
     * When serializable-checking is enabled by the security manager, types that
     * are not Serializable are rejected by fastjson2.
     */
    @Override
    public void writeObject(Object obj) throws IOException {
        updateClassLoaderIfNeed();
        JSONWriter.Feature[] features = fastjson2SecurityManager.getSecurityFilter().isCheckSerializable()
                ? CHECK_SERIALIZABLE_FEATURES
                : DEFAULT_FEATURES;
        byte[] bytes = JSONB.toBytes(obj, features);
        writeLength(bytes.length);
        os.write(bytes);
        os.flush();
    }

    // Rebinds the fastjson2 creators if the thread-context classLoader changed
    // since the last write (e.g. a redeployed application classLoader).
    private void updateClassLoaderIfNeed() {
        ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
        if (currentClassLoader != classLoader) {
            fastjson2CreatorManager.setCreator(currentClassLoader);
            classLoader = currentClassLoader;
        }
    }

    // Writes the payload length as 4 bytes, big-endian (most significant first),
    // mirroring FastJson2ObjectInput.readLength().
    private void writeLength(int value) throws IOException {
        byte[] bytes = new byte[Integer.BYTES];
        int length = bytes.length;
        for (int i = 0; i < length; i++) {
            bytes[length - i - 1] = (byte) (value & 0xFF);
            value >>= 8;
        }
        os.write(bytes);
    }

    @Override
    public void flushBuffer() throws IOException {
        os.flush();
    }
}
| 8,851 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/FastJson2ObjectInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.utils.ClassUtils;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Type;
import com.alibaba.fastjson2.JSONB;
import com.alibaba.fastjson2.JSONReader;
/**
* FastJson object input implementation
*/
/**
 * FastJson object input implementation.
 *
 * <p>Each value is read as a 4-byte big-endian length prefix followed by the
 * JSONB payload, mirroring {@code FastJson2ObjectOutput}.
 */
public class FastJson2ObjectInput implements ObjectInput {

    /** Reader features used on every read, regardless of serializable checking. */
    private static final JSONReader.Feature[] DEFAULT_FEATURES = {
        JSONReader.Feature.UseDefaultConstructorAsPossible,
        JSONReader.Feature.UseNativeObject,
        JSONReader.Feature.IgnoreAutoTypeNotMatch,
        JSONReader.Feature.FieldBased
    };

    /** DEFAULT_FEATURES plus rejection of types that are not Serializable. */
    private static final JSONReader.Feature[] CHECK_SERIALIZABLE_FEATURES = {
        JSONReader.Feature.UseDefaultConstructorAsPossible,
        JSONReader.Feature.ErrorOnNoneSerializable,
        JSONReader.Feature.IgnoreAutoTypeNotMatch,
        JSONReader.Feature.UseNativeObject,
        JSONReader.Feature.FieldBased
    };

    private final Fastjson2CreatorManager fastjson2CreatorManager;
    private final Fastjson2SecurityManager fastjson2SecurityManager;

    // ClassLoader the fastjson2 creators are currently bound to; refreshed
    // lazily per read when the thread-context classLoader changes.
    private volatile ClassLoader classLoader;

    private final InputStream is;

    public FastJson2ObjectInput(
            Fastjson2CreatorManager fastjson2CreatorManager,
            Fastjson2SecurityManager fastjson2SecurityManager,
            InputStream in) {
        this.fastjson2CreatorManager = fastjson2CreatorManager;
        this.fastjson2SecurityManager = fastjson2SecurityManager;
        this.classLoader = Thread.currentThread().getContextClassLoader();
        this.is = in;
        fastjson2CreatorManager.setCreator(classLoader);
    }

    @Override
    public boolean readBool() throws IOException {
        return readObject(boolean.class);
    }

    @Override
    public byte readByte() throws IOException {
        return readObject(byte.class);
    }

    @Override
    public short readShort() throws IOException {
        return readObject(short.class);
    }

    @Override
    public int readInt() throws IOException {
        return readObject(int.class);
    }

    @Override
    public long readLong() throws IOException {
        return readObject(long.class);
    }

    @Override
    public float readFloat() throws IOException {
        return readObject(float.class);
    }

    @Override
    public double readDouble() throws IOException {
        return readObject(double.class);
    }

    @Override
    public String readUTF() throws IOException {
        return readObject(String.class);
    }

    @Override
    public byte[] readBytes() throws IOException {
        // Single-byte length prefix, mirroring FastJson2ObjectOutput.writeBytes().
        int length = is.read();
        if (length < 0) {
            throw new IllegalArgumentException("deserialize failed. unexpected end of stream while reading length");
        }
        return readFully(length);
    }

    @Override
    public Object readObject() throws IOException, ClassNotFoundException {
        return readObject(Object.class);
    }

    /**
     * Reads one length-prefixed JSONB frame and deserializes it as {@code cls},
     * applying the security filter and (optionally) serializable checking.
     *
     * @throws IllegalArgumentException on a truncated stream or when the decoded
     *         object's class does not match {@code cls}
     */
    @Override
    public <T> T readObject(Class<T> cls) throws IOException {
        updateClassLoaderIfNeed();
        byte[] bytes = readFully(readLength());
        return parseAndCheck(bytes, cls);
    }

    /**
     * Same as {@link #readObject(Class)}; the {@code type} argument is not used
     * by this implementation (the JSONB payload carries its own type info).
     */
    @Override
    public <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException {
        return readObject(cls);
    }

    // Deserializes a JSONB payload and verifies the result is assignable to cls.
    private <T> T parseAndCheck(byte[] bytes, Class<T> cls) {
        Fastjson2SecurityManager.Handler securityFilter = fastjson2SecurityManager.getSecurityFilter();
        JSONReader.Feature[] features =
                securityFilter.isCheckSerializable() ? CHECK_SERIALIZABLE_FEATURES : DEFAULT_FEATURES;
        T result = JSONB.parseObject(bytes, cls, securityFilter, features);
        if (result != null && cls != null && !ClassUtils.isMatch(result.getClass(), cls)) {
            throw new IllegalArgumentException(
                    "deserialize failed. expected class: " + cls + " but actual class: " + result.getClass());
        }
        return result;
    }

    // Rebinds the fastjson2 creators if the thread-context classLoader changed
    // since the last read.
    private void updateClassLoaderIfNeed() {
        ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
        if (currentClassLoader != classLoader) {
            fastjson2CreatorManager.setCreator(currentClassLoader);
            classLoader = currentClassLoader;
        }
    }

    /**
     * Reads exactly {@code length} bytes. Unlike a single InputStream.read call
     * (which may legally return fewer bytes on a live stream, e.g. a socket),
     * this loops until the buffer is full and only fails on real end-of-stream.
     */
    private byte[] readFully(int length) throws IOException {
        byte[] bytes = new byte[length];
        int total = 0;
        while (total < length) {
            int read = is.read(bytes, total, length - total);
            if (read < 0) {
                throw new IllegalArgumentException(
                        "deserialize failed. expected read length: " + length + " but actual read: " + total);
            }
            total += read;
        }
        return bytes;
    }

    // Reads the 4-byte big-endian length prefix written by writeLength().
    private int readLength() throws IOException {
        byte[] bytes = readFully(Integer.BYTES);
        int value = 0;
        for (byte b : bytes) {
            value = (value << 8) + (b & 0xFF);
        }
        return value;
    }
}
| 8,852 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/Fastjson2ScopeModelInitializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.beans.factory.ScopeBeanFactory;
import org.apache.dubbo.rpc.model.ApplicationModel;
import org.apache.dubbo.rpc.model.FrameworkModel;
import org.apache.dubbo.rpc.model.ModuleModel;
import org.apache.dubbo.rpc.model.ScopeModelInitializer;
/**
 * Registers the fastjson2 support beans (creator and security managers) at
 * framework scope; application and module scopes need no setup.
 */
public class Fastjson2ScopeModelInitializer implements ScopeModelInitializer {

    @Override
    public void initializeFrameworkModel(FrameworkModel frameworkModel) {
        final ScopeBeanFactory beanFactory = frameworkModel.getBeanFactory();
        beanFactory.registerBean(Fastjson2CreatorManager.class);
        beanFactory.registerBean(Fastjson2SecurityManager.class);
    }

    @Override
    public void initializeApplicationModel(ApplicationModel applicationModel) {
        // Nothing to register at application scope.
    }

    @Override
    public void initializeModuleModel(ModuleModel moduleModel) {
        // Nothing to register at module scope.
    }
}
| 8,853 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-fastjson2/src/main/java/org/apache/dubbo/common/serialize/fastjson2/Fastjson2SecurityManager.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.fastjson2;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.AllowClassNotifyListener;
import org.apache.dubbo.common.utils.ConcurrentHashSet;
import org.apache.dubbo.common.utils.SerializeCheckStatus;
import org.apache.dubbo.common.utils.SerializeSecurityManager;
import org.apache.dubbo.rpc.model.FrameworkModel;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import com.alibaba.fastjson2.filter.ContextAutoTypeBeforeHandler;
import com.alibaba.fastjson2.util.TypeUtils;
import static com.alibaba.fastjson2.util.TypeUtils.loadClass;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.PROTOCOL_UNTRUSTED_SERIALIZE_CLASS;
import static org.apache.dubbo.common.utils.SerializeCheckStatus.STRICT;
/**
 * Bridges Dubbo's serialize-allowlist configuration into a fastjson2
 * auto-type filter. Every configuration change rebuilds an immutable
 * {@link Handler} snapshot so readers never observe a half-updated filter.
 */
public class Fastjson2SecurityManager implements AllowClassNotifyListener {

    private static final ErrorTypeAwareLogger logger =
            LoggerFactory.getErrorTypeAwareLogger(Fastjson2SecurityManager.class);

    private final SerializeSecurityManager securityManager;

    // Current filter snapshot; replaced wholesale by rebuildSecurityFilter().
    private volatile Handler securityFilter;

    private volatile SerializeCheckStatus status = AllowClassNotifyListener.DEFAULT_STATUS;

    private volatile boolean checkSerializable = true;

    private volatile Set<String> allowedList = new ConcurrentHashSet<>(1);

    private volatile Set<String> disAllowedList = new ConcurrentHashSet<>(1);

    public Fastjson2SecurityManager(FrameworkModel frameworkModel) {
        securityManager = frameworkModel.getBeanFactory().getOrRegisterBean(SerializeSecurityManager.class);
        securityManager.registerListener(this);
        securityFilter = new Handler(
                AllowClassNotifyListener.DEFAULT_STATUS,
                securityManager,
                true,
                new String[0],
                new ConcurrentHashSet<>());
    }

    @Override
    public synchronized void notifyPrefix(Set<String> allowedList, Set<String> disAllowedList) {
        this.allowedList = allowedList;
        this.disAllowedList = disAllowedList;
        rebuildSecurityFilter();
    }

    @Override
    public synchronized void notifyCheckStatus(SerializeCheckStatus status) {
        this.status = status;
        rebuildSecurityFilter();
    }

    @Override
    public synchronized void notifyCheckSerializable(boolean checkSerializable) {
        this.checkSerializable = checkSerializable;
        rebuildSecurityFilter();
    }

    /**
     * Builds a fresh Handler snapshot from the current settings. Extracted to
     * remove the triplicated constructor call in the notify* callbacks; callers
     * hold this object's monitor.
     */
    private void rebuildSecurityFilter() {
        this.securityFilter = new Handler(
                this.status,
                this.securityManager,
                this.checkSerializable,
                this.allowedList.toArray(new String[0]),
                this.disAllowedList);
    }

    public Handler getSecurityFilter() {
        return securityFilter;
    }

    /**
     * Immutable fastjson2 auto-type filter: consults the allowlist first, then
     * the check mode (STRICT rejects unknown types), then the deny list, and
     * finally falls back to direct class loading with caching.
     */
    public static class Handler extends ContextAutoTypeBeforeHandler {
        final SerializeCheckStatus status;
        final SerializeSecurityManager serializeSecurityManager;
        // Cache of resolved (or denied) class names; DenyClass marks rejections.
        final Map<String, Class<?>> classCache = new ConcurrentHashMap<>(16, 0.75f, 1);
        final Set<String> disAllowedList;
        final boolean checkSerializable;

        public Handler(
                SerializeCheckStatus status,
                SerializeSecurityManager serializeSecurityManager,
                boolean checkSerializable,
                String[] acceptNames,
                Set<String> disAllowedList) {
            super(true, acceptNames);
            this.status = status;
            this.serializeSecurityManager = serializeSecurityManager;
            this.checkSerializable = checkSerializable;
            this.disAllowedList = disAllowedList;
        }

        @Override
        public Class<?> apply(String typeName, Class<?> expectClass, long features) {
            Class<?> tryLoad = super.apply(typeName, expectClass, features);
            // 1. in allow list, return
            if (tryLoad != null) {
                return tryLoad;
            }
            // 2. check if in strict mode
            if (status == STRICT) {
                String msg = "[Serialization Security] Serialized class " + typeName + " is not in allow list. "
                        + "Current mode is `STRICT`, will disallow to deserialize it by default. "
                        + "Please add it into security/serialize.allowlist or follow FAQ to configure it.";
                if (serializeSecurityManager.getWarnedClasses().add(typeName)) {
                    logger.error(PROTOCOL_UNTRUSTED_SERIALIZE_CLASS, "", "", msg);
                }
                throw new IllegalArgumentException(msg);
            }
            // 3. try load
            Class<?> localClass = loadClassDirectly(typeName);
            if (localClass != null) {
                if (status == SerializeCheckStatus.WARN
                        && serializeSecurityManager.getWarnedClasses().add(typeName)) {
                    logger.warn(
                            PROTOCOL_UNTRUSTED_SERIALIZE_CLASS,
                            "",
                            "",
                            "[Serialization Security] Serialized class " + localClass.getName()
                                    + " is not in allow list. "
                                    + "Current mode is `WARN`, will allow to deserialize it by default. "
                                    + "Dubbo will set to `STRICT` mode by default in the future. "
                                    + "Please add it into security/serialize.allowlist or follow FAQ to configure it.");
                }
                return localClass;
            }
            // 4. class not found
            return null;
        }

        // True when typeName starts with any configured deny-list prefix.
        public boolean checkIfDisAllow(String typeName) {
            return disAllowedList.stream().anyMatch(typeName::startsWith);
        }

        public boolean isCheckSerializable() {
            return checkSerializable;
        }

        /**
         * Resolves typeName to a class, honouring the deny list and caching the
         * outcome (including denials, via the DenyClass sentinel). Returns null
         * when the class is denied or cannot be found.
         */
        public Class<?> loadClassDirectly(String typeName) {
            Class<?> clazz = classCache.get(typeName);
            if (clazz == null && checkIfDisAllow(typeName)) {
                clazz = DenyClass.class;
                String msg = "[Serialization Security] Serialized class " + typeName + " is in disAllow list. "
                        + "Current mode is `WARN`, will disallow to deserialize it by default. "
                        + "Please add it into security/serialize.allowlist or follow FAQ to configure it.";
                if (serializeSecurityManager.getWarnedClasses().add(typeName)) {
                    logger.warn(PROTOCOL_UNTRUSTED_SERIALIZE_CLASS, "", "", msg);
                }
            }
            if (clazz == null) {
                clazz = TypeUtils.getMapping(typeName);
            }
            if (clazz == null) {
                clazz = loadClass(typeName);
            }
            if (clazz != null) {
                Class<?> origin = classCache.putIfAbsent(typeName, clazz);
                if (origin != null) {
                    clazz = origin;
                }
            }
            if (clazz == DenyClass.class) {
                return null;
            }
            return clazz;
        }
    }

    private static class DenyClass {
        // To indicate that the target class has been reject
    }
}
| 8,854 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/JavaSerialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.PROTOCOL_UNSAFE_SERIALIZATION;
import static org.apache.dubbo.common.serialize.Constants.JAVA_SERIALIZATION_ID;
/**
* Java serialization implementation
*
* <pre>
* e.g. <dubbo:protocol serialization="java" />
* </pre>
*/
public class JavaSerialization implements Serialization {
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(JavaSerialization.class);
private static final AtomicBoolean warn = new AtomicBoolean(false);
@Override
public byte getContentTypeId() {
return JAVA_SERIALIZATION_ID;
}
@Override
public String getContentType() {
return "x-application/java";
}
@Override
public ObjectOutput serialize(URL url, OutputStream out) throws IOException {
if (warn.compareAndSet(false, true)) {
logger.error(
PROTOCOL_UNSAFE_SERIALIZATION,
"",
"",
"Java serialization is unsafe. Dubbo Team do not recommend anyone to use it."
+ "If you still want to use it, please follow [JEP 290](https://openjdk.java.net/jeps/290)"
+ "to set serialization filter to prevent deserialization leak.");
}
return new JavaObjectOutput(out);
}
@Override
public ObjectInput deserialize(URL url, InputStream is) throws IOException {
if (warn.compareAndSet(false, true)) {
logger.error(
PROTOCOL_UNSAFE_SERIALIZATION,
"",
"",
"Java serialization is unsafe. Dubbo Team do not recommend anyone to use it."
+ "If you still want to use it, please follow [JEP 290](https://openjdk.java.net/jeps/290)"
+ "to set serialization filter to prevent deserialization leak.");
}
return new JavaObjectInput(is);
}
}
| 8,855 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/JavaObjectOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import org.apache.dubbo.common.serialize.nativejava.NativeJavaObjectOutput;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
/**
* Java object output implementation
*/
/**
 * Java object output implementation.
 *
 * <p>Strings are written as a length prefix (-1 for null) followed by the UTF
 * payload; objects are written as a one-byte null marker (0 = null, 1 =
 * present) followed by the object itself.
 */
public class JavaObjectOutput extends NativeJavaObjectOutput {

    public JavaObjectOutput(OutputStream os) throws IOException {
        super(new ObjectOutputStream(os));
    }

    public JavaObjectOutput(OutputStream os, boolean compact) throws IOException {
        super(compact ? new CompactedObjectOutputStream(os) : new ObjectOutputStream(os));
    }

    @Override
    public void writeUTF(String v) throws IOException {
        if (v == null) {
            // Null is encoded as a bare -1 length with no payload.
            getObjectOutputStream().writeInt(-1);
            return;
        }
        getObjectOutputStream().writeInt(v.length());
        getObjectOutputStream().writeUTF(v);
    }

    @Override
    public void writeObject(Object obj) throws IOException {
        if (obj == null) {
            // 0 marks a null slot; no object payload follows.
            getObjectOutputStream().writeByte(0);
            return;
        }
        getObjectOutputStream().writeByte(1);
        getObjectOutputStream().writeObject(obj);
    }

    @Override
    public void flushBuffer() throws IOException {
        getObjectOutputStream().flush();
    }
}
| 8,856 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/CompactedJavaSerialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import static org.apache.dubbo.common.serialize.Constants.COMPACTED_JAVA_SERIALIZATION_ID;
/**
* Compacted java serialization implementation
*
* <pre>
* e.g. <dubbo:protocol serialization="compactedjava" />
* </pre>
*/
/**
 * Compacted java serialization implementation: identical to plain java
 * serialization except class descriptors are written by name only.
 *
 * <pre>
 * e.g. &lt;dubbo:protocol serialization="compactedjava" /&gt;
 * </pre>
 */
public class CompactedJavaSerialization implements Serialization {

    @Override
    public byte getContentTypeId() {
        return COMPACTED_JAVA_SERIALIZATION_ID;
    }

    @Override
    public String getContentType() {
        return "x-application/compactedjava";
    }

    @Override
    public ObjectOutput serialize(URL url, OutputStream out) throws IOException {
        // true => compact mode: descriptors written as class names.
        return new JavaObjectOutput(out, true);
    }

    @Override
    public ObjectInput deserialize(URL url, InputStream is) throws IOException {
        // true => compact mode: descriptors resolved from class names.
        return new JavaObjectInput(is, true);
    }
}
| 8,857 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/CompactedObjectInputStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import org.apache.dubbo.common.utils.ClassUtils;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;
import java.io.StreamCorruptedException;
/**
* Compacted java object input implementation
*/
/**
 * Compacted java object input implementation.
 *
 * <p>Counterpart of {@code CompactedObjectOutputStream}: a class descriptor is
 * read either as a standard {@link ObjectStreamClass} descriptor (marker 0) or
 * as a bare class name resolved through the configured class loader (marker 1).
 */
public class CompactedObjectInputStream extends ObjectInputStream {
    /** Loader used to resolve class names read from the stream; never null. */
    private final ClassLoader classLoader;

    public CompactedObjectInputStream(InputStream in) throws IOException {
        this(in, Thread.currentThread().getContextClassLoader());
    }

    public CompactedObjectInputStream(InputStream in, ClassLoader cl) throws IOException {
        super(in);
        // Fall back to the framework's class loader when the caller supplies none.
        this.classLoader = cl == null ? ClassUtils.getClassLoader() : cl;
    }

    /**
     * Reads a class descriptor in compacted form.
     *
     * @throws EOFException if the stream ends before the marker byte
     * @throws StreamCorruptedException on an unknown marker value
     */
    @Override
    protected ObjectStreamClass readClassDescriptor() throws IOException, ClassNotFoundException {
        int type = read();
        if (type < 0) {
            throw new EOFException();
        }
        switch (type) {
            case 0:
                // Marker 0: full JDK descriptor follows (written for primitives/arrays).
                return super.readClassDescriptor();
            case 1:
                // Marker 1: compact form — only the class name was written.
                Class<?> clazz = loadClass(readUTF());
                return ObjectStreamClass.lookup(clazz);
            default:
                throw new StreamCorruptedException("Unexpected class descriptor type: " + type);
        }
    }

    private Class<?> loadClass(String className) throws ClassNotFoundException {
        return classLoader.loadClass(className);
    }
}
| 8,858 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/JavaObjectInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import org.apache.dubbo.common.serialize.nativejava.NativeJavaObjectInput;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.lang.reflect.Type;
/**
* Java object input implementation
*/
/**
 * Java object input implementation.
 *
 * <p>Length-prefixed wrapper around the JDK object stream: byte arrays and
 * strings are preceded by an int length (negative meaning null), objects by a
 * single null-marker byte.
 */
public class JavaObjectInput extends NativeJavaObjectInput {
    /** Upper bound accepted for a length-prefixed byte array (8 MiB). */
    public static final int MAX_BYTE_ARRAY_LENGTH = 8 * 1024 * 1024;

    public JavaObjectInput(InputStream is) throws IOException {
        super(new ObjectInputStream(is));
    }

    public JavaObjectInput(InputStream is, boolean compacted) throws IOException {
        super(compacted ? new CompactedObjectInputStream(is) : new ObjectInputStream(is));
    }

    @Override
    public byte[] readBytes() throws IOException {
        int length = getObjectInputStream().readInt();
        // Negative length encodes null, zero an empty array.
        if (length < 0) {
            return null;
        }
        if (length == 0) {
            return new byte[0];
        }
        if (length > MAX_BYTE_ARRAY_LENGTH) {
            throw new IOException("Byte array length too large. " + length);
        }
        byte[] buffer = new byte[length];
        getObjectInputStream().readFully(buffer);
        return buffer;
    }

    @Override
    public String readUTF() throws IOException {
        // A negative length prefix marks a null string; the value itself
        // follows as a regular modified-UTF string.
        if (getObjectInputStream().readInt() < 0) {
            return null;
        }
        return getObjectInputStream().readUTF();
    }

    @Override
    public Object readObject() throws IOException, ClassNotFoundException {
        // Marker byte: 0 means null, anything else is followed by the object.
        if (getObjectInputStream().readByte() == 0) {
            return null;
        }
        return getObjectInputStream().readObject();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T readObject(Class<T> cls) throws IOException, ClassNotFoundException {
        // The declared class is not used for resolution; the stream is trusted.
        return (T) readObject();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException {
        return (T) readObject();
    }
}
| 8,859 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/java/CompactedObjectOutputStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.java;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.OutputStream;
/**
* Compacted java object output implementation
*/
/**
 * Compacted java object output implementation.
 *
 * <p>Shrinks the standard serialization stream by replacing the full class
 * descriptor of ordinary classes with a one-byte marker plus the class name.
 * Primitives and arrays keep the full descriptor so the JDK can rebuild them.
 */
public class CompactedObjectOutputStream extends ObjectOutputStream {
    public CompactedObjectOutputStream(OutputStream out) throws IOException {
        super(out);
    }

    @Override
    protected void writeClassDescriptor(ObjectStreamClass desc) throws IOException {
        Class<?> type = desc.forClass();
        if (type.isPrimitive() || type.isArray()) {
            // Marker 0: the full descriptor is required for reconstruction.
            write(0);
            super.writeClassDescriptor(desc);
            return;
        }
        // Marker 1: compact form — the reader resolves the name via its loader.
        write(1);
        writeUTF(desc.getName());
    }
}
| 8,860 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/nativejava/NativeJavaObjectInput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.nativejava;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.utils.Assert;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.lang.reflect.Type;
/**
* Native java object input implementation
*/
/**
 * Native java object input implementation.
 *
 * <p>Thin adapter that delegates every read to a wrapped
 * {@link ObjectInputStream}; byte arrays are length-prefixed with an int,
 * where a negative length encodes null.
 */
public class NativeJavaObjectInput implements ObjectInput {
    private final ObjectInputStream inputStream;

    public NativeJavaObjectInput(InputStream is) throws IOException {
        this(new ObjectInputStream(is));
    }

    protected NativeJavaObjectInput(ObjectInputStream is) {
        Assert.notNull(is, "input == null");
        inputStream = is;
    }

    /** Exposes the wrapped stream to subclasses that need raw access. */
    protected ObjectInputStream getObjectInputStream() {
        return inputStream;
    }

    @Override
    public Object readObject() throws IOException, ClassNotFoundException {
        return inputStream.readObject();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T readObject(Class<T> cls) throws IOException, ClassNotFoundException {
        // The declared class is ignored; resolution follows the stream content.
        return (T) readObject();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T readObject(Class<T> cls, Type type) throws IOException, ClassNotFoundException {
        return (T) readObject();
    }

    @Override
    public boolean readBool() throws IOException {
        return inputStream.readBoolean();
    }

    @Override
    public byte readByte() throws IOException {
        return inputStream.readByte();
    }

    @Override
    public short readShort() throws IOException {
        return inputStream.readShort();
    }

    @Override
    public int readInt() throws IOException {
        return inputStream.readInt();
    }

    @Override
    public long readLong() throws IOException {
        return inputStream.readLong();
    }

    @Override
    public float readFloat() throws IOException {
        return inputStream.readFloat();
    }

    @Override
    public double readDouble() throws IOException {
        return inputStream.readDouble();
    }

    @Override
    public String readUTF() throws IOException {
        return inputStream.readUTF();
    }

    @Override
    public byte[] readBytes() throws IOException {
        // Length prefix: negative = null, zero = empty array.
        int length = inputStream.readInt();
        if (length < 0) {
            return null;
        }
        if (length == 0) {
            return new byte[] {};
        }
        byte[] buffer = new byte[length];
        inputStream.readFully(buffer);
        return buffer;
    }
}
| 8,861 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/nativejava/NativeJavaSerialization.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.nativejava;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.serialize.ObjectInput;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.serialize.Serialization;
import org.apache.dubbo.common.serialize.java.JavaSerialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.PROTOCOL_UNSAFE_SERIALIZATION;
import static org.apache.dubbo.common.serialize.Constants.NATIVE_JAVA_SERIALIZATION_ID;
/**
* Native java serialization implementation
*
* <pre>
* e.g. <dubbo:protocol serialization="nativejava" />
* </pre>
*/
/**
 * Native java serialization implementation.
 *
 * <pre>
 * e.g. &lt;dubbo:protocol serialization="nativejava" /&gt;
 * </pre>
 */
public class NativeJavaSerialization implements Serialization {

    // Log under this class, not JavaSerialization, so warnings are attributable.
    private static final ErrorTypeAwareLogger logger =
            LoggerFactory.getErrorTypeAwareLogger(NativeJavaSerialization.class);

    /** Ensures the unsafe-serialization warning is emitted at most once per JVM. */
    private static final AtomicBoolean warn = new AtomicBoolean(false);

    @Override
    public byte getContentTypeId() {
        return NATIVE_JAVA_SERIALIZATION_ID;
    }

    @Override
    public String getContentType() {
        return "x-application/nativejava";
    }

    @Override
    public ObjectOutput serialize(URL url, OutputStream output) throws IOException {
        warnUnsafeOnce();
        return new NativeJavaObjectOutput(output);
    }

    @Override
    public ObjectInput deserialize(URL url, InputStream input) throws IOException {
        warnUnsafeOnce();
        return new NativeJavaObjectInput(input);
    }

    /** Logs the JEP 290 deserialization warning on the first use only. */
    private static void warnUnsafeOnce() {
        if (warn.compareAndSet(false, true)) {
            logger.error(
                    PROTOCOL_UNSAFE_SERIALIZATION,
                    "",
                    "",
                    "Java serialization is unsafe. Dubbo Team do not recommend anyone to use it."
                            + "If you still want to use it, please follow [JEP 290](https://openjdk.java.net/jeps/290)"
                            + "to set serialization filter to prevent deserialization leak.");
        }
    }
}
| 8,862 |
0 |
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize
|
Create_ds/dubbo/dubbo-serialization/dubbo-serialization-jdk/src/main/java/org/apache/dubbo/common/serialize/nativejava/NativeJavaObjectOutput.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.serialize.nativejava;
import org.apache.dubbo.common.serialize.ObjectOutput;
import org.apache.dubbo.common.utils.Assert;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
/**
* Native java object output implementation
*/
public class NativeJavaObjectOutput implements ObjectOutput {
private final ObjectOutputStream outputStream;
public NativeJavaObjectOutput(OutputStream os) throws IOException {
this(new ObjectOutputStream(os));
}
protected NativeJavaObjectOutput(ObjectOutputStream out) {
Assert.notNull(out, "output == null");
this.outputStream = out;
}
protected ObjectOutputStream getObjectOutputStream() {
return outputStream;
}
@Override
public void writeObject(Object obj) throws IOException {
outputStream.writeObject(obj);
}
@Override
public void writeBool(boolean v) throws IOException {
outputStream.writeBoolean(v);
}
@Override
public void writeByte(byte v) throws IOException {
outputStream.writeByte(v);
}
@Override
public void writeShort(short v) throws IOException {
outputStream.writeShort(v);
}
@Override
public void writeInt(int v) throws IOException {
outputStream.writeInt(v);
}
@Override
public void writeLong(long v) throws IOException {
outputStream.writeLong(v);
}
@Override
public void writeFloat(float v) throws IOException {
outputStream.writeFloat(v);
}
@Override
public void writeDouble(double v) throws IOException {
outputStream.writeDouble(v);
}
@Override
public void writeUTF(String v) throws IOException {
outputStream.writeUTF(v);
}
@Override
public void writeBytes(byte[] v) throws IOException {
if (v == null) {
outputStream.writeInt(-1);
} else {
writeBytes(v, 0, v.length);
}
}
@Override
public void writeBytes(byte[] v, int off, int len) throws IOException {
if (v == null) {
outputStream.writeInt(-1);
} else {
outputStream.writeInt(len);
outputStream.write(v, off, len);
}
}
@Override
public void flushBuffer() throws IOException {
outputStream.flush();
}
}
| 8,863 |
0 |
Create_ds/dubbo/dubbo-container/dubbo-container-api/src/main/java/org/apache/dubbo
|
Create_ds/dubbo/dubbo-container/dubbo-container-api/src/main/java/org/apache/dubbo/container/Container.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.container;
import org.apache.dubbo.common.extension.SPI;
/**
* Container. (SPI, Singleton, ThreadSafe)
*/
@SPI("spring")
public interface Container {
/**
* start method to load the container.
*/
void start();
/**
* stop method to unload the container.
*/
void stop();
}
| 8,864 |
0 |
Create_ds/dubbo/dubbo-container/dubbo-container-api/src/main/java/org/apache/dubbo
|
Create_ds/dubbo/dubbo-container/dubbo-container-api/src/main/java/org/apache/dubbo/container/Main.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.container;
import org.apache.dubbo.common.extension.ExtensionLoader;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.ArrayUtils;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SPLIT_PATTERN;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.COMMON_THREAD_INTERRUPTED_EXCEPTION;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_START_DUBBO_ERROR;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_STOP_DUBBO_ERROR;
/**
* Main. (API, Static, ThreadSafe)
*
* This class is entry point loading containers.
*/
public class Main {
    public static final String CONTAINER_KEY = "dubbo.container";

    public static final String SHUTDOWN_HOOK_KEY = "dubbo.shutdown.hook";

    private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(Main.class);

    private static final ExtensionLoader<Container> LOADER = ExtensionLoader.getExtensionLoader(Container.class);

    private static final ReentrantLock LOCK = new ReentrantLock();

    // Signalled by the shutdown hook once all containers have been stopped.
    private static final Condition STOP = LOCK.newCondition();

    /**
     * Loads the requested container extensions, starts them, and blocks until
     * the shutdown hook (when enabled via "dubbo.shutdown.hook") signals that
     * every container has been stopped.
     *
     * @param args container extension names; when empty, the "dubbo.container"
     *             system property or the default extension is used
     */
    public static void main(String[] args) {
        try {
            if (ArrayUtils.isEmpty(args)) {
                String config = System.getProperty(CONTAINER_KEY, LOADER.getDefaultExtensionName());
                args = COMMA_SPLIT_PATTERN.split(config);
            }

            final List<Container> containers = new ArrayList<Container>();
            for (String name : args) {
                containers.add(LOADER.getExtension(name));
            }
            logger.info("Use container type(" + Arrays.toString(args) + ") to run dubbo service.");

            if ("true".equals(System.getProperty(SHUTDOWN_HOOK_KEY))) {
                Runtime.getRuntime().addShutdownHook(new Thread("dubbo-container-shutdown-hook") {
                    @Override
                    public void run() {
                        for (Container container : containers) {
                            try {
                                container.stop();
                                logger.info("Dubbo " + container.getClass().getSimpleName() + " stopped!");
                            } catch (Throwable t) {
                                logger.error(CONFIG_STOP_DUBBO_ERROR, "", "", t.getMessage(), t);
                            }
                        }
                        // Signal once, after ALL containers are stopped, so the
                        // main thread cannot wake up while some are still running.
                        LOCK.lock();
                        try {
                            STOP.signal();
                        } finally {
                            LOCK.unlock();
                        }
                    }
                });
            }

            for (Container container : containers) {
                container.start();
                logger.info("Dubbo " + container.getClass().getSimpleName() + " started!");
            }
            System.out.println(new SimpleDateFormat("[yyyy-MM-dd HH:mm:ss]").format(new Date())
                    + " Dubbo service server started!");
        } catch (RuntimeException e) {
            logger.error(CONFIG_START_DUBBO_ERROR, "", "", e.getMessage(), e);
            System.exit(1);
        }

        // Park the main thread until the shutdown hook signals completion.
        LOCK.lock();
        try {
            STOP.await();
        } catch (InterruptedException e) {
            logger.warn(
                    COMMON_THREAD_INTERRUPTED_EXCEPTION,
                    "",
                    "",
                    "Dubbo service server stopped, interrupted by other thread!",
                    e);
            // Restore the interrupt flag for any surrounding machinery.
            Thread.currentThread().interrupt();
        } finally {
            LOCK.unlock();
        }
    }
}
| 8,865 |
0 |
Create_ds/dubbo/dubbo-container/dubbo-container-spring/src/test/java/org/apache/dubbo/container
|
Create_ds/dubbo/dubbo-container/dubbo-container-spring/src/test/java/org/apache/dubbo/container/spring/SpringContainerTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.container.spring;
import org.apache.dubbo.common.extension.ExtensionLoader;
import org.apache.dubbo.container.Container;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* StandaloneContainerTest
*/
class SpringContainerTest {
    @Test
    void testContainer() {
        // Resolve the "spring" container implementation via the SPI extension loader.
        SpringContainer container = (SpringContainer)
                ExtensionLoader.getExtensionLoader(Container.class).getExtension("spring");
        container.start();
        // After start(), the Spring context must expose the bean declared under
        // the id "container" in the classpath META-INF/spring/*.xml config.
        Assertions.assertEquals(
                SpringContainer.class, container.context.getBean("container").getClass());
        container.stop();
    }
}
| 8,866 |
0 |
Create_ds/dubbo/dubbo-container/dubbo-container-spring/src/main/java/org/apache/dubbo/container
|
Create_ds/dubbo/dubbo-container/dubbo-container-spring/src/main/java/org/apache/dubbo/container/spring/SpringContainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.container.spring;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.container.Container;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_STOP_DUBBO_ERROR;
/**
* SpringContainer. (SPI, Singleton, ThreadSafe)
*
* The container class implementation for Spring
*/
/**
 * SpringContainer. (SPI, Singleton, ThreadSafe)
 *
 * The container class implementation for Spring
 */
public class SpringContainer implements Container {
    public static final String SPRING_CONFIG = "dubbo.spring.config";

    public static final String DEFAULT_SPRING_CONFIG = "classpath*:META-INF/spring/*.xml";

    private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(SpringContainer.class);

    // Shared application context; populated by start(), cleared by stop().
    static ClassPathXmlApplicationContext context;

    public static ClassPathXmlApplicationContext getContext() {
        return context;
    }

    @Override
    public void start() {
        // Resolve the config location, falling back to the conventional default.
        String configPath = System.getProperty(SPRING_CONFIG);
        if (StringUtils.isEmpty(configPath)) {
            configPath = DEFAULT_SPRING_CONFIG;
        }
        // Locations may be separated by commas and/or whitespace.
        String[] locations = configPath.split("[,\\s]+");
        context = new ClassPathXmlApplicationContext(locations, false);
        context.refresh();
        context.start();
    }

    @Override
    public void stop() {
        try {
            ClassPathXmlApplicationContext current = context;
            if (current != null) {
                current.stop();
                current.close();
                context = null;
            }
        } catch (Throwable e) {
            // Shutdown is best-effort; log and continue.
            logger.error(CONFIG_STOP_DUBBO_ERROR, "", "", e.getMessage(), e);
        }
    }
}
| 8,867 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/test/java/org/apache/aries/events/memory/MemoryPositionTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.memory;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class MemoryPositionTest {

    @Test
    public void testCompareTo() throws Exception {
        // Equal offsets compare as 0; larger as +1; smaller as -1.
        assertEquals(0, compare(5, 5));
        assertEquals(1, compare(10, 5));
        assertEquals(-1, compare(2, 5));
    }

    /** Builds two positions at the given offsets and compares the first to the second. */
    private int compare(long left, long right) {
        return new MemoryPosition(left).compareTo(new MemoryPosition(right));
    }
}
| 8,868 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/test/java/org/apache/aries/events/memory/MessagingTest.java
|
package org.apache.aries.events.memory;
import static org.apache.aries.events.api.SubscribeRequestBuilder.to;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Messaging;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.Received;
import org.apache.aries.events.api.Seek;
import org.apache.aries.events.api.SubscribeRequestBuilder;
import org.apache.aries.events.api.Subscription;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
public class MessagingTest {
    // Number of messages used by the bulk throughput test (testMany).
    private static final long MAX_MANY = 100000l;
    // Mocked subscriber callback; interactions are verified per test.
    @Mock
    private Consumer<Received> callback;
    // Captures every Received instance handed to the mocked callback.
    @Captor
    private ArgumentCaptor<Received> messageCaptor;
    // Subscriptions opened during a test; closed again in after().
    private Set<Subscription> subscriptions = new HashSet<>();
    private Messaging messaging;
    @Before
    public void before() {
        initMocks(this);
        messaging = new InMemoryMessaging();
    }
    @After
    public void after() {
        subscriptions.forEach(Subscription::close);
    }
    @Test
    public void testPositionFromString() {
        // Round-trip: string -> Position -> string.
        Position pos = messaging.positionFromString("1");
        assertThat(pos.compareTo(new MemoryPosition(1)), equalTo(0));
        assertThat(pos.positionToString(), equalTo("1"));
    }
    @Test
    public void testSend() {
        subscribe(to("test", callback).seek(Seek.earliest));
        String content = "testcontent";
        send("test", content);
        assertMessages(1);
        // Verify payload, position and the property set by send().
        Received received = messageCaptor.getValue();
        assertThat(received.getMessage().getPayload(), equalTo(toBytes(content)));
        assertEquals(0, received.getPosition().compareTo(new MemoryPosition(0)));
        assertThat(received.getMessage().getProperties().size(), equalTo(1));
        assertThat(received.getMessage().getProperties().get("my"), equalTo("testvalue"));
    }
    @Test(expected=NullPointerException.class)
    public void testInvalidSubscribe() {
        // A null Seek must be rejected at subscription time.
        subscribe(to("test", callback).seek(null));
    }
    @Test
    public void testExceptionInHandler() {
        // A throwing handler must not break delivery bookkeeping.
        doThrow(new RuntimeException("Expected exception")).when(callback).accept(Mockito.any(Received.class));
        subscribe(to("test", callback));
        send("test", "testcontent");
        assertMessages(1);
    }
    @Test
    public void testEarliestBefore() {
        // Subscribing with Seek.earliest before sending delivers everything.
        subscribe(to("test", callback).seek(Seek.earliest));
        send("test", "testcontent");
        send("test", "testcontent2");
        assertMessages(2);
        assertThat(messageContents(), contains("testcontent", "testcontent2"));
    }
    @Test
    public void testEarliestAfter() {
        // Seek.earliest also replays messages sent before the subscription.
        send("test", "testcontent");
        subscribe(to("test", callback).seek(Seek.earliest));
        send("test", "testcontent2");
        assertMessages(2);
        assertThat(messageContents(), contains("testcontent", "testcontent2"));
    }
    @Test
    public void testLatestBefore() {
        // Default seek (latest) before any send still sees all later messages.
        subscribe(to("test", callback));
        send("test", "testcontent");
        send("test", "testcontent2");
        assertMessages(2);
        assertThat(messageContents(), contains("testcontent", "testcontent2"));
    }
    @Test
    public void testLatest() {
        // Default seek (latest) must skip messages sent before subscribing.
        send("test", "testcontent");
        subscribe(to("test", callback));
        send("test", "testcontent2");
        assertMessages(1);
        assertThat(messageContents(), contains("testcontent2"));
    }
    @Test
    public void testFrom1() {
        // An explicit start position overrides the seek: replay from offset 1.
        send("test", "testcontent");
        send("test", "testcontent2");
        subscribe(to("test", callback).startAt(new MemoryPosition(1l)).seek(Seek.earliest));
        assertMessages(1);
        assertThat(messageContents(), contains("testcontent2"));
    }
    @Test
    public void testMany() {
        // Bulk test: count deliveries with an atomic rather than a mock.
        AtomicLong count = new AtomicLong();
        Consumer<Received> manyCallback = rec -> { count.incrementAndGet(); };
        messaging.subscribe(to("test", manyCallback));
        for (long c=0; c < MAX_MANY; c++) {
            send("test", "content " + c);
            if (c % 10000 == 0) {
                System.out.println("Sending " + c);
            }
        }
        // Delivery is asynchronous; wait until every message arrived.
        await().until(count::get, equalTo(MAX_MANY));
    }
    // Verifies the mocked callback received exactly num messages within 1s,
    // capturing each Received into messageCaptor.
    private void assertMessages(int num) {
        verify(callback, timeout(1000).times(num)).accept(messageCaptor.capture());
    }
    // Subscribes and records the subscription for cleanup in after().
    private void subscribe(SubscribeRequestBuilder request) {
        this.subscriptions.add(messaging.subscribe(request));
    }
    // Decodes the payloads of all captured messages, in delivery order.
    private List<String> messageContents() {
        return messageCaptor.getAllValues().stream()
                .map(this::getContent).collect(Collectors.toList());
    }
    private String getContent(Received rec) {
        return new String(rec.getMessage().getPayload(), Charset.forName("UTF-8"));
    }
    // Sends a message with the fixed property my=testvalue on the given topic.
    private void send(String topic, String content) {
        Map<String, String> props = new HashMap<String, String>();
        props.put("my", "testvalue");
        Message message = new Message(toBytes(content), props);
        messaging.send(topic, message);
    }
    private byte[] toBytes(String content) {
        return content.getBytes(Charset.forName("UTF-8"));
    }
}
| 8,869 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events/memory/InMemoryMessaging.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.memory;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Messaging;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.SubscribeRequestBuilder;
import org.apache.aries.events.api.SubscribeRequestBuilder.SubscribeRequest;
import org.apache.aries.events.api.Subscription;
import org.apache.aries.events.api.Type;
import org.osgi.service.component.annotations.Component;
/**
 * In-memory {@link Messaging} implementation. Topics are created lazily on
 * first use; each topic keeps at least {@code keepAtLeast} messages in its
 * journal before older entries may be evicted.
 */
@Component
@Type("memory")
public class InMemoryMessaging implements Messaging {

    private final Map<String, Topic> topics = new ConcurrentHashMap<>();
    private final int keepAtLeast;

    /** Creates a messaging instance retaining at least 10000 messages per topic. */
    public InMemoryMessaging() {
        this(10000);
    }

    /**
     * @param keepAtLeast minimum number of messages each topic journal retains
     */
    public InMemoryMessaging(int keepAtLeast) {
        this.keepAtLeast = keepAtLeast;
    }

    @Override
    public void send(String topicName, Message message) {
        topic(topicName).send(message);
    }

    @Override
    public Subscription subscribe(SubscribeRequestBuilder requestBuilder) {
        SubscribeRequest request = requestBuilder.build();
        return topic(request.getTopic()).subscribe(request);
    }

    @Override
    public Position positionFromString(String position) {
        return new MemoryPosition(Long.parseLong(position));
    }

    /** Returns the topic with the given name, creating it on first use. */
    private Topic topic(String name) {
        return topics.computeIfAbsent(name, newName -> new Topic(newName, keepAtLeast));
    }
}
| 8,870 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events/memory/Journal.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.memory;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicLong;
/**
 * An append-only, offset-addressed message log. Offsets start at 0 and grow
 * monotonically. Once more than {@code 2 * keepAtLeast} messages have been
 * appended since the last eviction, the {@code keepAtLeast} oldest entries
 * are dropped, so at least {@code keepAtLeast} recent entries survive.
 */
class Journal<T> {

    private final int keepAtLeast;
    private final AtomicLong nextOffset = new AtomicLong();
    private final ConcurrentNavigableMap<Long, T> messages = new ConcurrentSkipListMap<>();
    // Number of appends since the last eviction.
    private final AtomicLong count = new AtomicLong();

    public Journal(int keepAtLeast) {
        this.keepAtLeast = keepAtLeast;
    }

    /** Appends a message and returns the offset it was stored at. */
    public long append(T message) {
        long appendedSinceEvict = count.incrementAndGet();
        if (appendedSinceEvict > keepAtLeast * 2) {
            evict();
        }
        long offset = nextOffset.getAndIncrement();
        messages.put(offset, message);
        return offset;
    }

    // Drops the keepAtLeast oldest entries and restarts the append counter.
    private synchronized void evict() {
        Iterator<Long> oldestFirst = messages.keySet().iterator();
        for (int removed = 0; removed < keepAtLeast; removed++) {
            messages.remove(oldestFirst.next());
        }
        count.set(0);
    }

    /** Offset of the oldest retained message, or 0 when the journal is empty. */
    public long getFirstOffset() {
        Entry<Long, T> first = messages.firstEntry();
        return first == null ? 0 : first.getKey();
    }

    /** Offset of the newest message, or -1 when the journal is empty. */
    public long getLastOffset() {
        Entry<Long, T> last = messages.lastEntry();
        return last == null ? -1 : last.getKey();
    }

    /** Entry at the lowest offset >= the given offset, or null if none exists. */
    public Entry<Long, T> getNext(long offset) {
        return this.messages.ceilingEntry(offset);
    }
}
| 8,871 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events/memory/MemoryPosition.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.memory;
import org.apache.aries.events.api.Position;
/**
 * Position inside an in-memory topic, identified by a single journal offset.
 * Ordering and the string form are both defined by that offset alone.
 */
class MemoryPosition implements Position {

    private long offset;

    MemoryPosition(long offset) {
        this.offset = offset;
    }

    long getOffset() {
        return offset;
    }

    @Override
    public String positionToString() {
        return String.valueOf(offset);
    }

    @Override
    public int compareTo(Position other) {
        MemoryPosition that = (MemoryPosition) other;
        return Long.compare(this.offset, that.offset);
    }
}
| 8,872 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.memory/src/main/java/org/apache/aries/events/memory/Topic.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.memory;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.Received;
import org.apache.aries.events.api.Seek;
import org.apache.aries.events.api.SubscribeRequestBuilder.SubscribeRequest;
import org.apache.aries.events.api.Subscription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A single in-memory topic. Messages are appended to a {@link Journal} and
 * each subscription drains that journal on its own single-threaded poller.
 * Thread-safety: send() and waitNext() synchronize on this Topic instance;
 * notifyAll()/wait() wake pollers blocked on an empty journal.
 */
class Topic {
private final Logger log = LoggerFactory.getLogger(this.getClass());
private final String topicName;
private final Journal<Message> journal;
public Topic(String topicName, int keepAtLeast) {
this.topicName = topicName;
this.journal = new Journal<>(keepAtLeast);
}
// Appends the message and wakes every poller waiting for new messages.
public synchronized Position send(Message message) {
long offset = this.journal.append(message);
notifyAll();
return new MemoryPosition(offset);
}
// Starts a poller that delivers messages to the request's callback from the
// resolved start offset onwards.
public Subscription subscribe(SubscribeRequest request) {
long startOffset = getStartOffset((MemoryPosition) request.getPosition(), request.getSeek());
log.debug("Consuming from " + startOffset);
return new TopicSubscription(startOffset, request.getCallback());
}
// An explicit position wins; otherwise seek to the first retained offset
// (earliest) or one past the last offset (latest).
private long getStartOffset(MemoryPosition position, Seek seek) {
if (position != null) {
return position.getOffset();
} else {
if (seek == Seek.earliest) {
return this.journal.getFirstOffset();
} else {
return this.journal.getLastOffset() + 1;
}
}
}
// Returns the next entry at or after currentOffset, blocking on this Topic's
// monitor until send() signals. May still return null after waking (e.g. a
// spurious wakeup); callers must tolerate that and retry.
private synchronized Entry<Long, Message> waitNext(long currentOffset) throws InterruptedException {
Entry<Long, Message> entry = journal.getNext(currentOffset);
if (entry != null) {
return entry;
}
log.debug("Waiting for next message");
wait();
return journal.getNext(currentOffset);
}
// Pull-based subscription: a dedicated thread repeatedly fetches the next
// journal entry and hands it to the consumer callback.
class TopicSubscription implements Subscription {
private Consumer<Received> callback;
private ExecutorService executor;
// Offset of the next message to deliver.
private long currentOffset;
TopicSubscription(long startOffset, Consumer<Received> callback) {
this.currentOffset = startOffset;
this.callback = callback;
String name = "Poller for " + topicName;
this.executor = Executors.newSingleThreadExecutor(r -> new Thread(r, name));
this.executor.execute(this::poll);
}
// Poll loop; exits when close() interrupts the executor thread.
private void poll() {
try {
while (true) {
Entry<Long, Message> entry = waitNext(currentOffset);
if (entry != null) {
handleMessage(entry);
}
}
} catch (InterruptedException e) {
log.debug("Poller thread for consumer on topic " + topicName + " stopped.");
}
}
// Delivers one entry to the callback; a failing callback is logged and the
// message is then skipped. NOTE(review): the Received position is built from
// currentOffset, which can lag behind entry.getKey() when older messages were
// evicted from the journal — confirm whether the entry's own offset was meant.
private void handleMessage(Entry<Long, Message> entry) {
long offset = entry.getKey();
try {
MemoryPosition position = new MemoryPosition(this.currentOffset);
Received received = new Received(position, entry.getValue());
callback.accept(received);
} catch (Exception e) {
log.warn(e.getMessage(), e);
}
this.currentOffset = offset + 1;
}
@Override
public void close() {
executor.shutdown();
executor.shutdownNow();
}
}
}
| 8,873 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka/KafkaMessagingTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Messaging;
import org.apache.aries.events.api.SubscribeRequestBuilder;
import org.apache.aries.events.api.Subscription;
import org.apache.aries.events.kafka.setup.KafkaBaseTest;
import org.junit.Test;
import org.mockito.Mockito;
import static java.nio.charset.Charset.forName;
import static java.util.Collections.singletonMap;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
public class KafkaMessagingTest extends KafkaBaseTest {
@Test
public void testPositionFromString() throws Exception {
Messaging messaging = new KafkaMessaging();
KafkaPosition kafkaPosition = (KafkaPosition) messaging.positionFromString("0:100");
assertEquals(0, kafkaPosition.getPartition());
assertEquals(100, kafkaPosition.getOffset());
}
@Test(expected = IllegalArgumentException.class)
public void testPositionFromStringIllegalArgument() throws Exception {
Messaging messaging = new KafkaMessaging();
messaging.positionFromString("0:100:23");
}
@Test(timeout = 10000)
public void testSendAndReceive() throws Exception {
String topic = "test_send_and_receive";
createTopic(topic, 1);
KafkaEndpoint kafkaEndpoint = Mockito.mock(KafkaEndpoint.class);
when(kafkaEndpoint.kafkaBootstrapServers())
.thenReturn(getKafkaLocal().getKafkaBootstrapServer());
KafkaMessaging messaging = new KafkaMessaging();
messaging.activate(kafkaEndpoint);
byte[] payload = "test".getBytes(forName("UTF-8"));
Message message = new Message(payload, singletonMap("prop1", "value1"));
messaging.send(topic, message);
Semaphore invoked = new Semaphore(0);
SubscribeRequestBuilder requestBuilder = SubscribeRequestBuilder
.to(topic, (received) -> invoked.release())
.startAt(new KafkaPosition(0, 0));
try (Subscription subscription = messaging.subscribe(requestBuilder)) {
invoked.tryAcquire(10, TimeUnit.SECONDS);
}
messaging.deactivate();
}
}
| 8,874 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka/KafkaPositionTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import java.util.NavigableMap;
import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.aries.events.api.Position;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/** Unit tests for {@link KafkaPosition}: accessors, string form and ordering. */
public class KafkaPositionTest {

    private static final Random RAND = new Random();

    @Test
    public void testGetPartition() throws Exception {
        KafkaPosition position = new KafkaPosition(10, 1000);
        assertEquals(10, position.getPartition());
    }

    @Test
    public void testGetOffset() throws Exception {
        KafkaPosition position = new KafkaPosition(10, 1000);
        assertEquals(1000, position.getOffset());
    }

    @Test
    public void testPositionToString() throws Exception {
        KafkaPosition position = new KafkaPosition(10, 1000);
        assertEquals("10:1000", position.positionToString());
    }

    // Ordering is defined by the offset alone, so a random partition is used
    // on both sides of every comparison.
    @Test
    public void testCompareTo() throws Exception {
        assertEquals(0, comparePositions(position(RAND.nextInt(), 5), position(RAND.nextInt(), 5)));
        assertEquals(1, comparePositions(position(RAND.nextInt(), 10), position(RAND.nextInt(), 5)));
        assertEquals(-1, comparePositions(position(RAND.nextInt(), 2), position(RAND.nextInt(), 5)));
    }

    // Positions must sort by offset when used as keys in a sorted map.
    @Test
    public void testOrder() {
        NavigableMap<Position, String> positions = new TreeMap<>();
        positions.put(new KafkaPosition(0, 0), "earliest");
        positions.put(new KafkaPosition(0, 1), "mid");
        positions.put(new KafkaPosition(0, 2), "latest");
        assertEquals("earliest", positions.firstEntry().getValue());
        assertEquals("latest", positions.lastEntry().getValue());
    }

    private int comparePositions(KafkaPosition left, KafkaPosition right) {
        return left.compareTo(right);
    }

    private KafkaPosition position(int partition, long offset) {
        return new KafkaPosition(partition, offset);
    }
}
| 8,875 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka/setup/KafkaBaseTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka.setup;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.Collections;
import java.util.Set;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.DeleteTopicsResult;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static java.nio.file.Files.createTempDirectory;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.apache.aries.events.kafka.setup.KafkaLocal.getKafkaProperties;
import static org.apache.kafka.clients.admin.AdminClient.create;
import static org.apache.kafka.clients.admin.AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG;
/**
 * Base class for Kafka integration tests. Boots a local single-node ZooKeeper
 * and Kafka broker (on random free ports, writing to fresh temp directories)
 * once per test class, and offers topic administration helpers.
 */
public class KafkaBaseTest {

    private static final Logger LOG = LoggerFactory.getLogger(KafkaBaseTest.class);

    private static KafkaLocal kafkaLocal;
    private static ZooKeeperLocal zooKeeperLocal;

    @BeforeClass
    public static void startKafka() throws IOException {
        int zkPort = randomAvailablePort();
        String zkDir = createTempDirectory("zk").toString();
        String zkConnect = format("127.0.0.1:%s", zkPort);
        zooKeeperLocal = new ZooKeeperLocal(ZooKeeperLocal.getZooKeeperProperties(zkDir, zkPort));
        LOG.info(format("Started local ZooKeeper server on port %s and dataDirectory %s", zkPort, zkDir));
        int kafkaPort = randomAvailablePort();
        String kafkaLogDir = createTempDirectory("kafka").toString();
        kafkaLocal = new KafkaLocal(getKafkaProperties(kafkaLogDir, kafkaPort, zkConnect));
        // Bug fix: log the Kafka port; the original passed zkConnect here.
        LOG.info(format("Started local Kafka on port %s and logDirectory %s", kafkaPort, kafkaLogDir));
    }

    @AfterClass
    public static void shutdownKafka() {
        if (kafkaLocal != null) {
            kafkaLocal.stop();
        }
        if (zooKeeperLocal != null) {
            zooKeeperLocal.stop();
        }
    }

    public static KafkaLocal getKafkaLocal() {
        return kafkaLocal;
    }

    /** Returns the names of all topics known to the embedded broker. */
    public Set<String> listTopics() {
        try (AdminClient admin = buildAdminClient()) {
            ListTopicsResult result = admin.listTopics();
            return result.names().get();
        } catch (Exception e) {
            throw new RuntimeException("Failed to list topics", e);
        }
    }

    /** Creates a topic with the given partition count (replication factor 1). */
    public void createTopic(String topicName, int numPartitions) {
        NewTopic newTopic = new NewTopic(topicName, numPartitions, (short) 1);
        try (AdminClient admin = buildAdminClient()) {
            CreateTopicsResult result = admin.createTopics(singletonList(newTopic));
            result.values().get(topicName).get();
            LOG.info(format("created topic %s", topicName));
        } catch (Exception e) {
            throw new RuntimeException(format("Failed to create topic %s", topicName), e);
        }
    }

    /** Deletes the given topic, blocking until the broker confirms. */
    public void deleteTopic(String topicName) {
        try (AdminClient admin = buildAdminClient()) {
            DeleteTopicsResult result = admin.deleteTopics(Collections.singleton(topicName));
            result.all().get();
            LOG.info(format("deleted topic %s", topicName));
        } catch (Exception e) {
            throw new RuntimeException(format("Failed to delete topic %s", topicName), e);
        }
    }

    // Binds an ephemeral socket to discover a free port, then releases it.
    private static int randomAvailablePort() {
        try (ServerSocket ss = new ServerSocket(0)) {
            return ss.getLocalPort();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private AdminClient buildAdminClient() {
        return create(singletonMap(BOOTSTRAP_SERVERS_CONFIG, kafkaLocal.getKafkaBootstrapServer()));
    }
}
| 8,876 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka/setup/ZooKeeperLocal.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka.setup;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Properties;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.ZooKeeperServerMain;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
/**
 * Embedded single-node ZooKeeper server for tests. The server runs on a
 * daemon thread started from the constructor.
 */
public class ZooKeeperLocal {

    private final ZooKeeperServerMain server;

    public ZooKeeperLocal(Properties zkProperties) {
        QuorumPeerConfig quorumPeerConfig = new QuorumPeerConfig();
        try {
            quorumPeerConfig.parseProperties(zkProperties);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        ServerConfig serverConfig = new ServerConfig();
        serverConfig.readFrom(quorumPeerConfig);
        server = new ZooKeeperServerMain();
        Thread dt = new Thread(runnable(serverConfig));
        dt.setDaemon(true);
        dt.start();
    }

    /** Stops the server via the non-public ZooKeeperServerMain.shutdown(). */
    public void stop() {
        try {
            Method shutdown = server.getClass().getDeclaredMethod("shutdown");
            shutdown.setAccessible(true);
            shutdown.invoke(server);
        } catch (Exception e) {
            // Bug fix: preserve the cause (original threw RuntimeException()).
            throw new RuntimeException(e);
        }
    }

    /** Minimal ZooKeeper configuration: data directory and client port. */
    public static Properties getZooKeeperProperties(String dataDirectory, int port) {
        Properties props = new Properties();
        props.put("dataDir", dataDirectory);
        // Store as String: Properties is specified to hold String values only.
        props.put("clientPort", String.valueOf(port));
        return props;
    }

    // Wraps the blocking runFromConfig call for the daemon thread.
    private Runnable runnable(ServerConfig serverConfig) {
        return () -> {
            try {
                server.runFromConfig(serverConfig);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
    }
}
| 8,877 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/test/java/org/apache/aries/events/kafka/setup/KafkaLocal.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka.setup;
import java.util.HashMap;
import java.util.Map;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServerStartable;
import static java.lang.String.format;
/**
 * Embedded Kafka broker for tests, started eagerly from the constructor.
 */
public class KafkaLocal {

    private final KafkaServerStartable server;
    private final String kafkaBootstrapServer;

    public KafkaLocal(Map<String, Object> kafkaProperties) {
        KafkaConfig config = new KafkaConfig(kafkaProperties);
        this.kafkaBootstrapServer = format("%s:%s", config.hostName(), config.port());
        this.server = new KafkaServerStartable(config);
        this.server.startup();
    }

    /** Shuts the broker down. */
    public void stop() {
        server.shutdown();
    }

    /** host:port string suitable for a client's bootstrap.servers setting. */
    public String getKafkaBootstrapServer() {
        return kafkaBootstrapServer;
    }

    /** Broker config rooted at the given log dir, port and ZooKeeper connect string. */
    public static Map<String, Object> getKafkaProperties(String logDir, int port, String zkConnect) {
        Map<String, Object> props = new HashMap<>();
        props.put("host.name", "localhost");
        props.put("log.dir", logDir);
        props.put("port", port);
        props.put("zookeeper.connect", zkConnect);
        return props;
    }
}
| 8,878 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events/kafka/KafkaSubscription.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import java.util.function.Consumer;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.Received;
import org.apache.aries.events.api.Subscription;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static java.time.Duration.ofHours;
import static java.util.Objects.requireNonNull;
import static org.apache.aries.events.kafka.KafkaMessaging.toMessage;
/**
 * Kafka-backed subscription: a runnable poll loop that forwards every consumed
 * record to the supplied callback. close() stops the loop by clearing the
 * running flag and waking the consumer via KafkaConsumer.wakeup(), which makes
 * the blocked poll() throw WakeupException.
 */
public class KafkaSubscription implements Subscription, Runnable {
private static final Logger LOG = LoggerFactory.getLogger(KafkaSubscription.class);
// Volatile so the polling thread sees the flag flipped by close().
private volatile boolean running = true;
private final KafkaConsumer<String, byte[]> consumer;
private final Consumer<Received> callback;
public KafkaSubscription(KafkaConsumer<String, byte[]> consumer, Consumer<Received> callback) {
this.consumer = requireNonNull(consumer);
this.callback = requireNonNull(callback);
}
@Override
public void run() {
try {
for (;running;) {
// Long (1h) poll; wakeup() aborts it with a WakeupException.
ConsumerRecords<String, byte[]> records = consumer.poll(ofHours(1));
records.forEach(record -> callback.accept(toReceived(record)));
}
} catch (WakeupException e) {
if (running) {
// Unexpected wakeup while still running: log and propagate.
LOG.error("WakeupException while running {}", e.getMessage(), e);
throw e;
} else {
// Expected path after close() flipped the flag.
LOG.debug("WakeupException while stopping {}", e.getMessage(), e);
}
} catch(Throwable t) {
LOG.error(format("Catch Throwable %s closing subscription", t.getMessage()), t);
throw t;
} finally {
// Close the network connections and sockets
consumer.close();
}
}
@Override
public void close() {
running = false;
consumer.wakeup();
}
// Wraps a consumer record into the API's Received with its Kafka position.
private Received toReceived(ConsumerRecord<String, byte[]> record) {
Position position = new KafkaPosition(record.partition(), record.offset());
return new Received(position, toMessage(record));
}
}
| 8,879 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events/kafka/KafkaMessaging.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Messaging;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.Seek;
import org.apache.aries.events.api.SubscribeRequestBuilder;
import org.apache.aries.events.api.SubscribeRequestBuilder.SubscribeRequest;
import org.apache.aries.events.api.Subscription;
import org.apache.aries.events.api.Type;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.ConfigurationPolicy;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.metatype.annotations.Designate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.Integer.parseInt;
import static java.lang.Long.parseLong;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Collections.singleton;
import static java.util.Collections.unmodifiableMap;
import static java.util.stream.StreamSupport.stream;
import static org.apache.kafka.clients.consumer.ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.ACKS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
@Type("kafka")
@Component(service = Messaging.class, configurationPolicy = ConfigurationPolicy.REQUIRE)
@Designate(ocd = KafkaEndpoint.class)
public class KafkaMessaging implements Messaging {
private static final Logger LOG = LoggerFactory.getLogger(KafkaMessaging.class);
/**
* The partition to send and receive records.
*/
private static final int PARTITION = 0;
/**
* Shared Kafka producer instance ({@code KafkaProducer}s are thread-safe).
*/
private KafkaProducer<String, byte[]> producer;
private Map<String, Object> producerConfig;
private KafkaEndpoint endPoint;
/**
 * Builds the immutable Kafka producer configuration from the endpoint.
 */
@Activate
public void activate(KafkaEndpoint endPoint) {
    this.endPoint = endPoint;
    Map<String, Object> config = new HashMap<>();
    config.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    config.put(BOOTSTRAP_SERVERS_CONFIG, endPoint.kafkaBootstrapServers());
    // We favour durability over throughput and thus require full
    // acknowledgment from the replica leader and followers.
    config.put(ACKS_CONFIG, "all");
    this.producerConfig = unmodifiableMap(config);
}
// Releases the shared producer on component deactivation.
// NOTE(review): producer may still be null if send() was never called —
// assumes closeQuietly tolerates null; confirm its implementation.
@Deactivate
public void deactivate() {
closeQuietly(producer);
}
/**
 * Sends the message to partition {@code PARTITION} of the given topic,
 * blocking until the broker acknowledges the record.
 *
 * @throws RuntimeException if sending fails or the calling thread is interrupted
 */
@Override
public void send(String topic, Message message) {
    ProducerRecord<String, byte[]> record = new ProducerRecord<String, byte[]>(
            topic, PARTITION, null, message.getPayload(), toHeaders(message.getProperties()));
    try {
        RecordMetadata metadata = kafkaProducer().send(record).get();
        LOG.info(format("Sent to %s", metadata));
    } catch (InterruptedException e) {
        // Bug fix: restore the interrupt status swallowed by Future.get().
        Thread.currentThread().interrupt();
        throw new RuntimeException(format("Failed to send message on topic %s", topic), e);
    } catch (ExecutionException e) {
        // Typo fix in the error message: "mesage" -> "message".
        throw new RuntimeException(format("Failed to send message on topic %s", topic), e);
    }
}
@Override
public Subscription subscribe(SubscribeRequestBuilder requestBuilder) {
SubscribeRequest request = requestBuilder.build();
KafkaConsumer<String, byte[]> consumer = buildKafkaConsumer(request.getSeek());
TopicPartition topicPartition = new TopicPartition(request.getTopic(), PARTITION);
Collection<TopicPartition> topicPartitions = singleton(topicPartition);
consumer.assign(topicPartitions);
if (request.getPosition() != null) {
consumer.seek(topicPartition, asKafkaPosition(request.getPosition()).getOffset());
} else if (request.getSeek() == Seek.earliest) {
consumer.seekToBeginning(topicPartitions);
} else {
consumer.seekToEnd(topicPartitions);
}
KafkaSubscription subscription = new KafkaSubscription(consumer, request.getCallback());
// TODO pool the threads
Thread thread = new Thread(subscription);
thread.setDaemon(true);
thread.start();
return subscription;
}
@Override
public Position positionFromString(String position) {
String[] chunks = position.split(":");
if (chunks.length != 2) {
throw new IllegalArgumentException(format("Illegal position format %s", position));
}
return new KafkaPosition(parseInt(chunks[0]), parseLong(chunks[1]));
}
static String positionToString(Position position) {
KafkaPosition kafkaPosition = asKafkaPosition(position);
return format("%s:%s", kafkaPosition.getPartition(), kafkaPosition.getOffset());
}
static Iterable<Header> toHeaders(Map<String, String> properties) {
return properties.entrySet().stream()
.map(KafkaMessaging::toHeader)
.collect(Collectors.toList());
}
static Map<String, String> toProperties(Headers headers) {
return stream(headers.spliterator(), true)
.collect(Collectors.toMap(Header::key, header -> new String(header.value(), UTF_8)));
}
static RecordHeader toHeader(Map.Entry<String, String> property) {
return new RecordHeader(property.getKey(), property.getValue().getBytes(UTF_8));
}
static Message toMessage(ConsumerRecord<String, byte[]> record) {
return new Message(record.value(), toProperties(record.headers()));
}
private synchronized KafkaProducer<String, byte[]> kafkaProducer() {
if (producer == null) {
producer = new KafkaProducer<>(producerConfig);
}
return producer;
}
private KafkaConsumer<String, byte[]> buildKafkaConsumer(Seek seek) {
String groupId = UUID.randomUUID().toString();
Map<String, Object> consumerConfig = new HashMap<>();
consumerConfig.put(BOOTSTRAP_SERVERS_CONFIG, endPoint.kafkaBootstrapServers());
consumerConfig.put(GROUP_ID_CONFIG, groupId);
consumerConfig.put(ENABLE_AUTO_COMMIT_CONFIG, false);
consumerConfig.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
consumerConfig.put(VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
consumerConfig.put(AUTO_OFFSET_RESET_CONFIG, seek.name());
return new KafkaConsumer<>(unmodifiableMap(consumerConfig));
}
private void closeQuietly(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (IOException ignore) {
// ignore
}
}
}
private static KafkaPosition asKafkaPosition(Position position) {
if (! KafkaPosition.class.isInstance(position)) {
throw new IllegalArgumentException(format("Position %s must be and instance of %s", position, KafkaPosition.class.getCanonicalName()));
}
return (KafkaPosition) position;
}
}
| 8,880 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events/kafka/KafkaEndpoint.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
/**
 * OSGi metatype configuration for the Kafka-backed {@code Messaging} implementation.
 *
 * <p>Instances are bound to {@code KafkaMessaging} via {@code @Designate}; the
 * single attribute supplies the Kafka {@code bootstrap.servers} value used for
 * both producers and consumers.
 */
@ObjectClassDefinition(name = "Apache Aries Events - Apache Kafka endpoint",
description = "Apache Kafka endpoint")
public @interface KafkaEndpoint {
@AttributeDefinition(name = "Kafka Bootstrap Servers",
description = "A comma separated list of host/port pairs to use for establishing the initial connection to the Kafka cluster.")
String kafkaBootstrapServers() default "localhost:9092";
}
| 8,881 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.kafka/src/main/java/org/apache/aries/events/kafka/KafkaPosition.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aries.events.kafka;
import javax.annotation.Nonnull;
import org.apache.aries.events.api.Position;
/**
 * Position of a record in a Kafka-backed log: a (partition, offset) pair.
 *
 * <p>Immutable. Ordering is by offset only, which is sufficient because the
 * Kafka messaging implementation writes every topic to a single partition.
 */
public final class KafkaPosition implements Position {

    private final int partition;
    private final long offset;

    public KafkaPosition(int partition, long offset) {
        this.partition = partition;
        this.offset = offset;
    }

    public int getPartition() {
        return partition;
    }

    public long getOffset() {
        return offset;
    }

    @Override
    public int compareTo(@Nonnull Position p) {
        // Positions handed to this method are expected to originate from the
        // same messaging implementation; a foreign type fails with a CCE.
        KafkaPosition other = (KafkaPosition) p;
        return Long.compare(offset, other.offset);
    }

    /** Serialized form is {@code partition:offset}, delegated to the messaging class. */
    @Override
    public String positionToString() {
        return KafkaMessaging.positionToString(this);
    }

    @Override
    public String toString() {
        return positionToString();
    }
}
| 8,882 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/test/java/org/apache/aries/events/mongo/MongoProvider.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import org.junit.rules.ExternalResource;
import java.util.Optional;
import java.util.logging.Logger;
import static org.junit.Assume.assumeTrue;
/**
* Provides connection to an external mongodb instance
* New database gets created for each test and dropped
* afterwards.
* Database URL must be provided by mongoUri system
* property
*/
/**
 * JUnit rule providing a connection to an external mongodb instance.
 * A fresh database is created before each test and dropped afterwards.
 * The database URL must be supplied via the {@code aries.events.test.mongoUri}
 * system property; tests are skipped (assumption failure) when it is absent.
 */
public class MongoProvider extends ExternalResource {

    /** Returns the named collection from the per-test database. */
    MongoCollection<Document> getCollection(String name) {
        return database.getCollection(name);
    }

    //*********************************************
    // Internals
    //*********************************************

    private static final String MONGO_URI_PROP = "aries.events.test.mongoUri";
    private static final String DEFAULT_DB_NAME = "tmp_aries_events_test";

    private MongoClient client;
    private MongoDatabase database;

    @Override
    protected void before() {
        String uri = mongoUri();
        client = MongoClients.create(uri);
        // Use the database named in the URI, falling back to a throwaway name.
        String dbName = new MongoClientURI(uri).getDatabase();
        database = client.getDatabase(dbName != null ? dbName : DEFAULT_DB_NAME);
    }

    @Override
    protected void after() {
        if (database != null) {
            database.drop();
        }
        if (client != null) {
            client.close();
        }
    }

    /** Reads the mongo URI property, skipping the test when it is not set. */
    private static String mongoUri() {
        String result = System.getProperty(MONGO_URI_PROP);
        if (result != null) {
            return result;
        }
        String message = "No mongo URI provided.\n" +
                " In order to enable mongo tests, define " + MONGO_URI_PROP + " system property\n" +
                " to point to a running instance of mongodb.\n" +
                " Example:\n" +
                " mvn test -D" + MONGO_URI_PROP + "=mongodb://localhost:27017/";
        System.out.println("WARNING: " + message);
        assumeTrue(message, false);
        return null; // unreachable: assumeTrue(..., false) always throws
    }
}
| 8,883 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/test/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/test/java/org/apache/aries/events/mongo/SenderReceiverTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import com.mongodb.client.MongoCollection;
import org.apache.aries.events.api.Message;
import org.bson.Document;
import org.junit.Rule;
import org.junit.Test;
import java.util.AbstractMap.SimpleEntry;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import static java.util.Collections.emptyMap;
import static org.apache.aries.events.mongo.MessageReceiverImpl.messageReceiver;
import static org.apache.aries.events.mongo.MessageSenderImpl.messageSender;
import static org.junit.Assert.assertEquals;
/**
 * Round-trip tests for the Mongo-backed message sender/receiver pair.
 */
public class SenderReceiverTest {

    /** Sends a message and verifies it comes back intact (payload and properties). */
    @Test public void testReplicate() throws InterruptedException {
        MongoCollection<Document> collection = mongoProvider.getCollection("events");
        MessageSender sender = messageSender(collection, 1000 * 60 * 60 * 24 * 7);
        MessageReceiver receiver = messageReceiver(collection);
        Message expected = new Message(new byte[]{ 1, 2, 3 }, mapOf(
                keyVal("key1", "val1"),
                keyVal("key2", "val2"))
        );
        sender.send(expected);
        sender.send(expected);
        Message actual = receiver.receive(0);
        assertEquals(expected, actual);
    }

    /**
     * With a zero max-age every record is immediately evicted, so receiving
     * index 0 must fail.
     */
    @Test(expected = NoSuchElementException.class)
    public void testEvicted() throws InterruptedException {
        MongoCollection<Document> collection = mongoProvider.getCollection("events");
        MessageSender sender = messageSender(collection, 0);
        MessageReceiver receiver = messageReceiver(collection);
        Message expected = new Message(new byte[] { 1, 2, 3}, emptyMap());
        sender.send(expected);
        sender.send(expected);
        receiver.receive(0);
    }

    //*********************************************
    // Internals
    //*********************************************

    @Rule
    public MongoProvider mongoProvider = new MongoProvider();

    private static Map.Entry<String, String> keyVal(String key, String value) {
        return new SimpleEntry<>(key, value);
    }

    // @SafeVarargs suppresses the unchecked-varargs warning; the array is only read.
    @SafeVarargs
    private static Map<String, String> mapOf(Map.Entry<String, String>... mappings) {
        Map<String, String> result = new HashMap<>();
        for (Map.Entry<String, String> entry : mappings) {
            result.put(entry.getKey(), entry.getValue());
        }
        return result;
    }
}
| 8,884 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MongoEndpoint.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
/**
 * OSGi metatype configuration for the MongoDB-backed {@code Messaging}
 * implementation.
 *
 * <p>Bound to {@code MongoMessaging} via {@code @Designate}. Supplies the
 * connection URI (whose database name, if present, selects the database) and
 * the log retention period used to evict old messages.
 */
@ObjectClassDefinition(
name = "MongoDB configuration",
description = "Mongodb URI"
)
public @interface MongoEndpoint {
@AttributeDefinition(
name = "Mongo URI",
description = "Specifies mongodb URI as it is specified here: https://docs.mongodb.com/manual/reference/connection-string/ "
)
String mongoUri() default "mongodb://localhost:27017/aem_distribution";
@AttributeDefinition(
name = "Max Age",
description = "Log retention time expressed in milliseconds"
)
long maxAge() default 1000L * 3600 * 24 * 7; // One week in ms
}
| 8,885 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MongoPosition.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.apache.aries.events.api.Position;
/**
 * Position in the Mongo-backed log: a single monotonically increasing index.
 *
 * <p>Immutable. Created via {@link #position(long)}; the raw index is
 * recovered with {@link #index(Position)}.
 */
class MongoPosition implements Position {

    /** Wraps a raw log index as a {@link Position}. */
    static Position position(long index) {
        return new MongoPosition(index);
    }

    /**
     * Extracts the raw log index.
     *
     * @throws ClassCastException if the position was not produced by this class
     */
    static long index(Position position) {
        return ((MongoPosition) position).index;
    }

    @Override
    public String positionToString() {
        return String.valueOf(index);
    }

    @Override
    public int compareTo(Position o) {
        // Long.compare replaces the previous hand-rolled three-way comparison.
        return Long.compare(index, ((MongoPosition) o).index);
    }

    // *******************************************************
    // Private
    // *******************************************************

    private final long index;

    private MongoPosition(long index) {
        this.index = index;
    }
}
| 8,886 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MongoSubscription.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Received;
import org.apache.aries.events.api.Seek;
import org.apache.aries.events.api.Subscription;
import org.slf4j.Logger;
import java.util.function.Consumer;
import static java.lang.Thread.currentThread;
import static java.lang.Thread.interrupted;
import static org.apache.aries.events.mongo.MongoPosition.position;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Subscription to a Mongo-backed message log. A daemon thread polls the
 * receiver starting at the resolved index and feeds each message to the
 * subscriber's callback.
 */
final class MongoSubscription implements Subscription {

    //*********************************************
    // Creation
    //*********************************************

    /**
     * Creates a subscription starting at an explicit log index.
     *
     * @param index first index to deliver; must be non-negative
     */
    static MongoSubscription subscription(
            MessageReceiver receiver, long index, Seek fallBack, Consumer<Received> consumer
    ) {
        assert index >= 0L : "Illegal log index: [" + index + "]";
        // NOTE(review): fallBack is accepted but never used — an explicit index
        // always wins here. Confirm whether the fall-back seek should apply
        // when the given index is no longer available.
        return new MongoSubscription(receiver, index, consumer);
    }

    /** Creates a subscription starting at the earliest or latest available index. */
    static MongoSubscription subscription(
            MessageReceiver receiver, Seek seek, Consumer<Received> consumer
    ) {
        switch (seek) {
            case latest:
                return new MongoSubscription(receiver, LATEST_INDEX, consumer);
            case earliest:
                return new MongoSubscription(receiver, EARLIEST_INDEX, consumer);
            default:
                throw new AssertionError(seek);
        }
    }

    //*********************************************
    // Package interface
    //*********************************************

    /** Index of the next message to be delivered. */
    long index() {
        return index;
    }

    //*********************************************
    // Specialization
    //*********************************************

    @Override
    public void close() {
        receiver.close();
    }

    @Override
    public String toString() {
        return "Subscription" + receiver + '[' + index + ']';
    }

    //*********************************************
    // Private
    //*********************************************

    /** Sentinel indexes used by the seek-based factory. */
    private static final long LATEST_INDEX = -1;
    private static final long EARLIEST_INDEX = -2;

    private static final Logger LOGGER = getLogger(MongoSubscription.class);

    private final MessageReceiver receiver;
    private long index;
    private final Consumer<Received> consumer;

    private MongoSubscription(
            MessageReceiver receiver, long index, Consumer<Received> consumer
    ) {
        this.consumer = consumer;
        this.receiver = receiver;
        // Resolve the sentinel values to concrete log indexes.
        if (index == EARLIEST_INDEX) {
            this.index = receiver.earliestIndex();
        } else if (index == LATEST_INDEX) {
            this.index = receiver.latestIndex();
        } else {
            this.index = index;
        }
        // Bug fix: a redundant reassignment here previously clobbered the
        // earliest-index resolution above, resetting this.index back to the
        // EARLIEST_INDEX sentinel (-2) for earliest subscriptions.
        startBackgroundThread(() -> poll(receiver), "MongoMessageConsumer-" + receiver);
    }

    /** Polls the receiver until the thread is interrupted, delivering in order. */
    private void poll(MessageReceiver receiver) {
        while (!interrupted()) {
            try {
                Message message = receiver.receive(index);
                LOGGER.debug("Received: " + message);
                Received received = new Received(position(index), message);
                consumer.accept(received);
                index += 1L;
            } catch (InterruptedException e) {
                // Re-set the flag so the loop condition observes it and exits.
                currentThread().interrupt();
            } catch (Exception e) {
                // Keep polling: a failing callback must not kill the subscription.
                LOGGER.error("Error handling message", e);
            }
        }
        LOGGER.debug("Quitting " + this);
        receiver.close();
    }

    private static Thread startBackgroundThread(Runnable runnable, String threadName) {
        Thread thread = new Thread(runnable, threadName);
        thread.setDaemon(true);
        thread.start();
        return thread;
    }
}
| 8,887 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/CachingFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.slf4j.Logger;
import java.io.Closeable;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import static org.slf4j.LoggerFactory.getLogger;
/**
* A factory that keeps previously created instances in a cache so that
* they will get reused if requested repeatedly.
* Currently there is no cache size limit implemented so this implementation
* is only good for the use case with limited parameter space.
* @param <K> key type. Serves as cache key as well as an input parameter for the
* factory method. Must provide sensible implementations for
* equals and hashCode methods
* @param <V> result type.
*/
/**
 * A factory that keeps previously created instances in a cache so that
 * they will get reused if requested repeatedly.
 * Currently there is no cache size limit implemented so this implementation
 * is only good for the use case with limited parameter space.
 *
 * <p>Thread-safe: all cache access is synchronized on this instance.
 *
 * @param <K> key type. Serves as cache key as well as an input parameter for the
 *           factory method. Must provide sensible implementations for
 *           equals and hashCode methods
 * @param <V> result type.
 */
public final class CachingFactory<K, V extends AutoCloseable> implements Closeable {

    /** Creates a caching wrapper around the given factory function. */
    public static <K2, V2 extends AutoCloseable> CachingFactory<K2, V2> cachingFactory(Function<K2, V2> create) {
        return new CachingFactory<>(create);
    }

    /**
     * Find or created a value for the specified key
     * @param arg key instance
     * @return either an existing (cached) value of newly created one.
     */
    public synchronized V get(K arg) {
        return cache.computeIfAbsent(arg, create);
    }

    /**
     * Clears all cached instances properly disposing them.
     */
    public synchronized void clear() {
        // Collection.forEach is sufficient here; no stream needed.
        cache.values().forEach(CachingFactory::safeClose);
        cache.clear();
    }

    /**
     * Closing this factory properly disposing all cached instances
     */
    @Override
    public void close() {
        clear();
    }

    //*********************************************
    // Private
    //*********************************************

    private static final Logger LOG = getLogger(CachingFactory.class);

    private final Map<K, V> cache = new HashMap<>();
    private final Function<K, V> create;

    private CachingFactory(Function<K, V> create) {
        this.create = create;
    }

    /** Closes one instance, logging (not propagating) any failure. */
    private static void safeClose(AutoCloseable closable) {
        try {
            closable.close();
        } catch (Exception e) {
            LOG.warn(e.getMessage(), e);
        }
    }
}
| 8,888 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MongoMessaging.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Messaging;
import org.apache.aries.events.api.Position;
import org.apache.aries.events.api.SubscribeRequestBuilder;
import org.apache.aries.events.api.SubscribeRequestBuilder.SubscribeRequest;
import org.apache.aries.events.api.Subscription;
import org.bson.Document;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.metatype.annotations.Designate;
import java.util.Optional;
import static org.apache.aries.events.mongo.Common.DEFAULT_DB_NAME;
import static org.apache.aries.events.mongo.MongoPosition.index;
import static org.apache.aries.events.mongo.MongoPosition.position;
import static org.apache.aries.events.mongo.MongoSubscription.subscription;
import static org.apache.aries.events.mongo.MessageSenderImpl.messageSender;
import static org.apache.aries.events.mongo.MessageReceiverImpl.messageReceiver;
import static org.apache.aries.events.mongo.CachingFactory.cachingFactory;
import static org.osgi.service.component.annotations.ConfigurationPolicy.REQUIRE;
/**
 * MongoDB-backed implementation of the journaled {@code Messaging} service.
 * One Mongo collection per topic; senders are cached per topic and each
 * subscription gets its own receiver.
 */
@Component(service = Messaging.class, configurationPolicy = REQUIRE)
@Designate(ocd = MongoEndpoint.class)
public class MongoMessaging implements Messaging {

    @Override
    public void send(String topic, Message message) {
        MessageSender sender = senderFactory.get(topic);
        sender.send(message);
    }

    @Override
    public Subscription subscribe(SubscribeRequestBuilder requestBuilder) {
        SubscribeRequest request = requestBuilder.build();
        MongoCollection<Document> collection = database.getCollection(request.getTopic());
        MessageReceiver receiver = messageReceiver(collection);
        return subscription(receiver, index(request.getPosition()), request.getSeek(), request.getCallback());
    }

    /** Parses a position serialized as a plain decimal log index. */
    @Override
    public Position positionFromString(String position) {
        long index = Long.parseLong(position);
        return position(index);
    }

    // *******************************************************
    // Private
    // *******************************************************

    /** Per-topic cache of message senders; closed on deactivation. */
    private CachingFactory<String, MessageSender> senderFactory;
    private MongoClient client;
    private MongoDatabase database;

    @Activate
    protected void activate(MongoEndpoint config) {
        MongoClientURI uri = new MongoClientURI(config.mongoUri());
        client = new MongoClient(uri);
        // Use the database named in the URI, falling back to the default name.
        String dbName = Optional.ofNullable(uri.getDatabase()).orElse(DEFAULT_DB_NAME);
        this.database = client.getDatabase(dbName);
        this.senderFactory = cachingFactory(topic -> {
            MongoCollection<Document> collection = database.getCollection(topic);
            return messageSender(collection, config.maxAge());
        });
    }

    @Deactivate
    protected void deactivate() {
        // Bug fix: dispose the cached senders before dropping the connection;
        // previously only the client was closed, leaking the sender cache.
        if (senderFactory != null) {
            senderFactory.close();
        }
        client.close();
    }
}
| 8,889 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/Common.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import static com.mongodb.client.model.Filters.lte;
import static com.mongodb.client.model.Indexes.descending;
import static org.apache.aries.events.mongo.Common.Fields.INDEX;
/**
* Common string definitions
*/
/**
 * Common string definitions
 */
@SuppressWarnings({"HardCodedStringLiteral", "InterfaceNeverImplemented"})
interface Common {

    /** Database name used when the configured Mongo URI does not name one. */
    String DEFAULT_DB_NAME = "aem-replication";

    /** MongoDB field names */
    interface Fields {
        String INDEX = "i";       // monotonically increasing log index
        String TIME_STAMP = "t";  // presumably the send timestamp (ms) — confirm against the sender
        String PAYLOAD = "d";     // message payload bytes
        String PROPS = "p";       // message properties
    }

    /**
     * Returns the next available index in the collection
     * @param col collection to check. The collection must contain
     *            log messages published by a Publisher instance
     * @return the index that should be assigned to the next message when
     *         it gets published
     */
    static long upcomingIndex(MongoCollection<Document> col) {
        // lte(INDEX, MAX_VALUE) matches every document; presumably phrased as a
        // range filter so the query can use the INDEX index — confirm.
        Document doc = col.find(lte(INDEX, Long.MAX_VALUE))
                .sort(descending(INDEX))
                .first();
        if (doc != null) {
            // NOTE(review): getLong unboxes — throws NPE if the field is absent
            // or stored as a non-long; assumed impossible for sender-written docs.
            long latestAvailable = doc.getLong(INDEX);
            return latestAvailable + 1L;
        } else {
            // Empty collection: the log starts at index 0.
            return 0L;
        }
    }
}
| 8,890 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MessageReceiverImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.Filters;
import org.apache.aries.events.api.Message;
import org.apache.aries.events.api.Received;
import org.apache.aries.events.mongo.Common.Fields;
import org.bson.Document;
import org.bson.types.Binary;
import org.slf4j.Logger;
import static java.lang.Math.min;
import static java.lang.System.currentTimeMillis;
import static java.lang.Thread.sleep;
import static java.util.Collections.emptyList;
import static org.apache.aries.events.mongo.Common.Fields.PAYLOAD;
import static org.apache.aries.events.mongo.Common.Fields.INDEX;
import static org.apache.aries.events.mongo.Common.upcomingIndex;
import static org.slf4j.LoggerFactory.getLogger;
final class MessageReceiverImpl implements MessageReceiver {
    /**
     * Creates a receiver reading from the given collection. A receiver
     * created this way does not own the Mongo client, so {@link #close()}
     * only aborts pending polls without closing any connection.
     * @param col MongoDB collection backing the message log
     * @return a new receiver
     */
    static MessageReceiver messageReceiver(MongoCollection<Document> col) {
        return new MessageReceiverImpl(col, Optional.empty());
    }
    @Override
    public Message receive(long index) throws InterruptedException {
        // Poll (with adaptive back-off) until the requested entry is in the
        // local buffer, then serve it directly from the buffer.
        fetch(index);
        long bufferIndex = index - firstIndex;
        assert bufferIndex < buffer.size() : bufferIndex + ", " + buffer.size();
        return buffer.get((int) bufferIndex);
    }
    @Override
    public long earliestIndex() {
        // Refreshing with the FIRST_AVAILABLE sentinel pre-fetches a batch
        // starting at the oldest stored entry and records its index.
        refreshBuffer(FIRST_AVAILABLE);
        return firstIndex;
    }
    @Override
    public long latestIndex() {
        // upcomingIndex is the index of the entry yet to be inserted, so the
        // newest existing entry is one before it. Note: 0 is returned both
        // for an empty log and for a log whose newest entry is at index 0.
        long result = upcomingIndex(col);
        if (result > 0) {
            result -= 1;
        }
        return result;
    }
    @Override
    public void close() {
        // The MongoDB driver doesn't like to be interrupted, so we set a
        // flag that lets the poll loop exit gently (see adaptivePause).
        interrupted = true;
        mongoClient.ifPresent(Mongo::close);
    }
    //*********************************************
    // Internals
    //*********************************************
    private static final Logger LOGGER = getLogger(MessageReceiverImpl.class);
    // Sleep slice length (ms) used by adaptivePause so that close() is
    // noticed promptly even during a long pause.
    private static final long FINE_GRAINED_DELAY = 100L;
    // Sentinel index meaning "start from the oldest available entry".
    private static final long FIRST_AVAILABLE = -1;
    // Present only when this receiver owns the client and must close it
    // (the static factory above always passes Optional.empty()).
    private final Optional<MongoClient> mongoClient;
    private final MongoCollection<Document> col;
    // Upper bound (ms) for the adaptive polling delay.
    private long maxWaitTime = 1000L;
    // Initial capacity hint for the fetch buffer. NOTE(review): the query in
    // refreshBuffer is not limited by this value -- confirm whether a
    // .limit(fetchLimit) was intended on the find().
    private int fetchLimit = 100;
    // Time (epoch ms) when an entry was last received; drives the back-off.
    private long lastReceived = currentTimeMillis();
    // Log index of buffer.get(0); the buffer holds consecutive entries.
    private long firstIndex = 0L;
    private List<Message> buffer = emptyList();
    // Set by close(); checked by adaptivePause to abort polling.
    private volatile boolean interrupted = false;
    private MessageReceiverImpl(MongoCollection<Document> col, Optional<MongoClient> mongoClient) {
        LOGGER.debug("Creating new receiver: " + col.getNamespace().getCollectionName());
        this.mongoClient = mongoClient;
        this.col = col;
    }
    /**
     * Polls until the entry at {@code index} is buffered. The pause between
     * polls is half the time elapsed since the last received entry, capped
     * at {@code maxWaitTime}.
     * @throws InterruptedException if the receiver is closed or the thread
     *         is interrupted while pausing
     */
    private void fetch(long index) throws InterruptedException {
        while (firstIndex > index || firstIndex + buffer.size() <= index) {
            long delay = min(maxWaitTime, (currentTimeMillis() - lastReceived) / 2);
            adaptivePause(delay);
            refreshBuffer(index);
        }
    }
    /**
     * Replaces the buffer with a batch of consecutive entries starting at
     * {@code index}, or at the oldest available entry when {@code index} is
     * {@code FIRST_AVAILABLE}.
     * @throws NoSuchElementException if the requested entry has already been
     *         evicted from the log
     * @throws IllegalStateException if a gap is detected in the log
     */
    private void refreshBuffer(long index) {
        long startIndex = index;
        try (MongoCursor<Document> cursor = col.find(Filters.gte(INDEX, startIndex)).iterator()) {
            List<Message> collected = new ArrayList<>(fetchLimit);
            while (cursor.hasNext()) {
                int i = collected.size();
                Document document = cursor.next();
                long idx = document.get(INDEX, Long.class);
                if (startIndex == FIRST_AVAILABLE) {
                    // The first document seen defines the start of the batch.
                    startIndex = idx;
                }
                if (idx == startIndex + i) {
                    Binary payload = document.get(PAYLOAD, Binary.class);
                    // Unchecked cast: MessageSenderImpl stores
                    // Message.getProperties() (a Map<String, String>) under PROPS.
                    Map<String, String> props = (Map<String, String>) document.get(Fields.PROPS);
                    Message message = new Message(payload.getData(), props);
                    collected.add(message);
                } else {
                    if (i == 0) {
                        throw new NoSuchElementException("Element [" + startIndex + "] has been evicted from the log. Oldest available: [" + idx + "]");
                    } else {
                        throw new IllegalStateException("Missing element at [" + (startIndex + i) + "]. Next available at [" + idx + "]");
                    }
                }
            }
            buffer = collected;
            firstIndex = (startIndex == FIRST_AVAILABLE) ? 0L : startIndex;
            if (collected.size() > 0) {
                lastReceived = currentTimeMillis();
            }
        }
    }
    /**
     * Sleeps for roughly {@code ms} milliseconds in short slices, throwing
     * {@link InterruptedException} as soon as {@link #close()} has been called.
     */
    @SuppressWarnings("BusyWait")
    private void adaptivePause(long ms) throws InterruptedException {
        if (interrupted) {
            throw new InterruptedException();
        }
        long currentTime = currentTimeMillis();
        long stopTime = currentTime + ms;
        while (currentTime < stopTime) {
            if (interrupted) {
                throw new InterruptedException();
            }
            sleep(min(FINE_GRAINED_DELAY, ms));
            currentTime = currentTimeMillis();
        }
    }
}
| 8,891 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MessageReceiver.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.apache.aries.events.api.Message;
public interface MessageReceiver extends AutoCloseable {
    /** Returns the data entry at the specified offset.
     * If necessary, waits until the entry becomes available.
     * If the entry at the specified offset has already been
     * evicted from the log, a {@code NoSuchElementException}
     * is thrown.
     * @param index offset of the desired entry
     * @return the requested data entry
     * @throws InterruptedException if the receiver is closed or the
     *         calling thread is interrupted while waiting
     */
    Message receive(long index) throws InterruptedException;
    /** Returns the index of the earliest available
     * data entry. It also causes the receiver to
     * pre-fetch and cache a batch of the earliest available
     * entries, giving the user a chance to consume
     * them and catch up before they get evicted.
     * @return index of the first available data entry, or
     *         0 if the log is empty
     */
    long earliestIndex();
    /** Returns the index of the latest (most recently inserted)
     * data entry.
     * NOTE(review): the implementation in this module returns
     * {@code upcomingIndex - 1}, so 0 is returned both for an empty
     * log and for a log whose newest entry is at index 0 -- confirm
     * the intended contract for the empty case.
     * @return index of the latest available data entry, or 0 if
     *         the log is empty
     */
    long latestIndex();
    /** Releases resources held by this receiver and aborts any thread
     * currently blocked in {@link #receive(long)}. Never throws. */
    @Override
    void close();
}
| 8,892 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MessageSender.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import org.apache.aries.events.api.Message;
import java.util.Map;
/**
 * Provides an API for publishing data to a distribution log.
 */
public interface MessageSender extends AutoCloseable {
    /**
     * Publishes a single message to the log.
     * @param message the message to publish: a binary payload together
     *        with its string properties (see {@link Message})
     */
    void send(Message message);
}
| 8,893 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.mongo/src/main/java/org/apache/aries/events/mongo/MessageSenderImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.mongo;
import com.mongodb.MongoWriteException;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.model.IndexOptions;
import org.apache.aries.events.api.Message;
import org.bson.Document;
import org.slf4j.Logger;
import java.util.function.Consumer;
import static com.mongodb.client.model.Filters.lt;
import static java.lang.System.currentTimeMillis;
import static org.apache.aries.events.mongo.Common.Fields.INDEX;
import static org.apache.aries.events.mongo.Common.Fields.PAYLOAD;
import static org.apache.aries.events.mongo.Common.Fields.PROPS;
import static org.apache.aries.events.mongo.Common.Fields.TIME_STAMP;
import static org.apache.aries.events.mongo.Common.upcomingIndex;
import static org.slf4j.LoggerFactory.getLogger;
final class MessageSenderImpl implements MessageSender {
    //*********************************************
    // Creation
    //*********************************************
    /**
     * Creates a sender that appends messages to the given collection and
     * evicts entries older than {@code maxAge} milliseconds.
     * @param col MongoDB collection backing the message log
     * @param maxAge maximum age (ms) of retained entries
     * @return a new sender
     */
    static MessageSender messageSender(MongoCollection<Document> col, long maxAge) {
        return new MessageSenderImpl(col, maxAge);
    }
    //*********************************************
    // Specialization
    //*********************************************
    @Override
    public void send(Message message) {
        // Insert with up to 3 retries (write conflicts on the unique index
        // surface as MongoWriteException), then opportunistically evict
        // expired entries.
        publish1(message, 3);
        evict();
    }
    @Override
    public void close() {}
    //*********************************************
    // Internals
    //*********************************************
    private static final Logger LOGGER = getLogger(MessageSenderImpl.class);
    private final MongoCollection<Document> collection;
    // Earliest time (epoch ms) at which the next eviction pass may run;
    // avoids scanning the collection on every send.
    private long nextEvictionTime = 0L;
    // Maximum age (ms) of entries kept in the log.
    private final long maxAge;
    private MessageSenderImpl(MongoCollection<Document> collection, long maxAge) {
        LOGGER.debug("Creating new publisher: " + collection.getNamespace().getCollectionName());
        ensureIndexes(collection);
        this.collection = collection;
        this.maxAge = maxAge;
    }
    /**
     * Runs an eviction pass if the scheduled next eviction time has passed,
     * then reschedules based on the oldest remaining entry's time stamp.
     */
    private void evict() {
        long currentTime = currentTimeMillis();
        if (currentTime > nextEvictionTime) {
            doEvict(currentTime - maxAge);
            nextEvictionTime = oldestTimeStamp() + maxAge;
        }
    }
    /**
     * Deletes documents that are older than the specified threshold while preserving at least one document.
     * At least one document is needed in the collection in order to keep track of the oldest time stamp.
     * @param threshold time threshold (ms). Documents older than the threshold are removed.
     */
    private void doEvict(long threshold) {
        collection.find()
                .projection(new Document(TIME_STAMP, 1))
                .sort(new Document(TIME_STAMP, -1))
                .limit(1)
                .forEach((Consumer<Document>) doc -> {
                    // Clamp the threshold to the newest time stamp so the
                    // newest document always survives the deleteMany below.
                    long newestTimeStamp = timeStamp(doc);
                    long adjustedThreshold = Math.min(threshold, newestTimeStamp);
                    collection.deleteMany(lt(TIME_STAMP, adjustedThreshold));
                });
    }
    /**
     * Inserts the message at the current upcoming index, retrying on write
     * failures (e.g. a duplicate-index conflict with a concurrent sender).
     * @param retry number of retries left; the exception is rethrown when
     *        the budget is exhausted
     */
    private void publish1(Message message, int retry) {
        try {
            long index = upcomingIndex(collection);
            collection.insertOne(createDoc(index, message));
        } catch (MongoWriteException e) {
            if (retry > 0) {
                publish1(message, retry - 1);
            } else {
                throw e;
            }
        }
    }
    // Time stamp of the oldest stored document, or 0 if the log is empty.
    private long oldestTimeStamp() {
        try (MongoCursor<Document> docs = collection.find().sort(new Document(TIME_STAMP, 1)).iterator()) {
            return docs.hasNext() ? docs.next().get(TIME_STAMP, Long.class) : 0L;
        }
    }
    private static long timeStamp(Document doc) {
        return doc.get(TIME_STAMP, Long.class);
    }
    // Builds the log document: index, insertion time, payload bytes and the
    // message's String-to-String property map.
    private Document createDoc(long index, Message message) {
        Document result = new Document();
        result.put(INDEX, index);
        result.put(TIME_STAMP, currentTimeMillis());
        result.put(PAYLOAD, message.getPayload());
        result.put(PROPS, message.getProperties());
        return result;
    }
    // Unique index on INDEX makes concurrent inserts at the same position
    // fail fast (handled by the retry in publish1).
    private void ensureIndexes(MongoCollection<Document> col) {
        col.createIndex(new Document(INDEX, 1), new IndexOptions().unique(true));
    }
}
| 8,894 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events/api/Message.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.api;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.unmodifiableMap;
import static java.util.Objects.requireNonNull;
/**
* TODO If we allow wild card consumption then a message also needs a topic
*/
public final class Message {
private final byte[] payload;
private final Map<String, String> properties;
public Message(byte[] payload, Map<String, String> properties) {
requireNonNull(payload);
requireNonNull(properties);
this.payload = payload.clone();
this.properties = unmodifiableMap(new HashMap<>(properties));
}
public byte[] getPayload() {
return payload.clone();
}
public Map<String, String> getProperties() {
return properties;
}
@Override
public String toString() {
return "Message" + properties;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Message message = (Message) o;
return Arrays.equals(payload, message.payload) &&
properties.equals(message.properties);
}
@Override
public int hashCode() {
int result = Objects.hash(properties);
result = 31 * result + Arrays.hashCode(payload);
return result;
}
}
| 8,895 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events/api/Type.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.api;
/**
 * Annotation carrying a single string value.
 * NOTE(review): no {@code @Retention}/{@code @Target} is declared, so the
 * default CLASS retention applies and the annotation is not visible via
 * reflection at runtime -- confirm this is intended before relying on it.
 */
public @interface Type {
    String value();
}
| 8,896 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events/api/Position.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.api;
/**
 * Position in the topic.
 * E.g. for a Kafka implementation this would be a list of (partition, offset);
 * as we do not support partitions this could simply be an offset.
 *
 * Positions are ordered. The relative order between
 * two positions can be computed by invoking {@code Comparable#compareTo}.
 * Comparing this position with a specified position will return a negative
 * integer, zero, or a positive integer as this position happened before,
 * happened concurrently, or happened after the specified position.
 */
public interface Position extends Comparable<Position> {
    /** @return a string representation of this position */
    String positionToString();
}
| 8,897 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events/api/SubscribeRequestBuilder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.api;
import static java.util.Objects.requireNonNull;
import java.util.function.Consumer;
import javax.annotation.ParametersAreNonnullByDefault;
@ParametersAreNonnullByDefault
public final class SubscribeRequestBuilder {

    /**
     * Entry point: creates a builder for a subscription to {@code topic}
     * whose messages are delivered to {@code callback}.
     *
     * @param topic topic to consume from
     * @param callback invoked once per consumed message
     * @return a new builder
     */
    public static SubscribeRequestBuilder to(String topic, Consumer<Received> callback) {
        return new SubscribeRequestBuilder(new SubscribeRequest(topic, callback));
    }

    /**
     * Sets the position to resume consumption from.
     *
     * @param position position in the topic to start consuming from
     * @return this builder
     */
    public SubscribeRequestBuilder startAt(Position position) {
        request.position = position;
        return this;
    }

    /**
     * Chooses whether to start from the earliest or the latest entry when
     * the position is {@code null} or not valid. Defaults to
     * {@link Seek#latest}.
     *
     * @param seek fallback start location; must not be null
     * @return this builder
     */
    public SubscribeRequestBuilder seek(Seek seek) {
        request.seek = requireNonNull(seek, "Seek must not be null");
        return this;
    }

    /** @return the configured subscribe request */
    public SubscribeRequest build() {
        return request;
    }

    private final SubscribeRequest request;

    private SubscribeRequestBuilder(SubscribeRequest request) {
        this.request = request;
    }

    /** Value object holding the parameters of one subscription. */
    public static class SubscribeRequest {
        private final String topic;
        private final Consumer<Received> callback;
        private Position position;
        private Seek seek = Seek.latest;

        private SubscribeRequest(String topic, Consumer<Received> callback) {
            this.topic = topic;
            this.callback = callback;
        }

        public String getTopic() {
            return topic;
        }

        public Consumer<Received> getCallback() {
            return callback;
        }

        public Position getPosition() {
            return position;
        }

        public Seek getSeek() {
            return seek;
        }
    }
}
| 8,898 |
0 |
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events
|
Create_ds/aries-journaled-events/org.apache.aries.events.api/src/main/java/org/apache/aries/events/api/Received.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.aries.events.api;
/**
 * A message as received from a topic, paired with the position it was read
 * from. Instances are immutable value holders.
 */
public final class Received {
    // Made final: a Received is never mutated after construction (the class
    // exposes only getters), so mutable fields were an idiom violation.
    private final Position position;
    private final Message message;

    /**
     * @param position position the message was read from
     * @param message the received message
     */
    public Received(Position position, Message message) {
        this.position = position;
        this.message = message;
    }

    /** @return the position of this message in the topic */
    public Position getPosition() {
        return position;
    }

    /** @return the received message */
    public Message getMessage() {
        return message;
    }
}
| 8,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.