method (string, lengths 13-441k) | clean_method (string, lengths 7-313k) | doc (string, lengths 17-17.3k) | comment (string, lengths 3-1.42k) | method_name (string, lengths 1-273) | extra (dict) | imports (sequence) | imports_info (string, lengths 19-34.8k) | cluster_imports_info (string, lengths 15-3.66k) | libraries (sequence) | libraries_info (string, lengths 6-661) | id (int64, 0-2.92M) |
---|---|---|---|---|---|---|---|---|---|---|---|
public void testFormatDate() {
Calendar date = Dates.DateValue("04/21/2008 09:29:48");
assertEquals("Apr 21, 2008", Dates.FormatDate(date));
}
| void function() { Calendar date = Dates.DateValue(STR); assertEquals(STR, Dates.FormatDate(date)); } | /**
* Tests {@link Dates#FormatDate(Calendar)}.
*/ | Tests <code>Dates#FormatDate(Calendar)</code> | testFormatDate | {
"repo_name": "ajhalbleib/aicg",
"path": "appinventor/components/tests/com/google/appinventor/components/runtime/util/DatesTest.java",
"license": "mit",
"size": 9544
} | [
"java.util.Calendar"
] | import java.util.Calendar; | import java.util.*; | [
"java.util"
] | java.util; | 646,502 |
private Optional<Integer> findIdentifier(int start, ImmutableSet<String> identifiers) {
for (int i = start; i < toks.size(); i++) {
if (isIdentifierToken(i)) {
String id = tokenAt(i);
if (identifiers.contains(id)) {
return Optional.of(i);
}
}
}
return Optional.absent();
} | Optional<Integer> function(int start, ImmutableSet<String> identifiers) { for (int i = start; i < toks.size(); i++) { if (isIdentifierToken(i)) { String id = tokenAt(i); if (identifiers.contains(id)) { return Optional.of(i); } } } return Optional.absent(); } | /**
* Returns the index of the first place where one of the given identifiers occurs, or {@code
* Optional.absent()} if there is none.
*
* @param start the index to start looking at
* @param identifiers the identifiers to look for
*/ | Returns the index of the first place where one of the given identifiers occurs, or Optional.absent() if there is none | findIdentifier | {
"repo_name": "tranleduy2000/javaide",
"path": "lib-google-java-format/src/main/java/com/google/googlejavaformat/java/ImportOrderer.java",
"license": "gpl-3.0",
"size": 14447
} | [
"com.google.common.base.Optional",
"com.google.common.collect.ImmutableSet"
] | import com.google.common.base.Optional; import com.google.common.collect.ImmutableSet; | import com.google.common.base.*; import com.google.common.collect.*; | [
"com.google.common"
] | com.google.common; | 2,383,110 |
public native Bitmap getCurrentFrame(); | native Bitmap function(); | /**
* Get the current video frame
*
* @return bitmap object
*/ | Get the current video frame | getCurrentFrame | {
"repo_name": "liaozhoubei/NetEasyNews",
"path": "library/InitActivity/src/io/vov/vitamio/MediaPlayer.java",
"license": "gpl-3.0",
"size": 57992
} | [
"android.graphics.Bitmap"
] | import android.graphics.Bitmap; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 2,360,417 |
return _return;
}
/**
* Define el valor de la propiedad return.
*
* @param value
* allowed object is
* {@link ItemResponse } | return _return; } /** * Define el valor de la propiedad return. * * @param value * allowed object is * {@link ItemResponse } | /**
* Obtiene el valor de la propiedad return.
*
* @return
* possible object is
* {@link ItemResponse }
*
*/ | Obtiene el valor de la propiedad return | getReturn | {
"repo_name": "rlviana/pricegrabber-app",
"path": "pricegrabber-ws/pricegrabber-ws-impl/src/main/java/net/rlviana/pricegrabber/ws/service/item/UpdateItemResponse.java",
"license": "mit",
"size": 1796
} | [
"net.rlviana.pricegrabber.domain.response.core.ItemResponse"
] | import net.rlviana.pricegrabber.domain.response.core.ItemResponse; | import net.rlviana.pricegrabber.domain.response.core.*; | [
"net.rlviana.pricegrabber"
] | net.rlviana.pricegrabber; | 283,539 |
@Test
public void testDrawWithNullInfo() {
BufferedImage image = new BufferedImage(200 , 100,
BufferedImage.TYPE_INT_RGB);
Graphics2D g2 = image.createGraphics();
this.chart.draw(g2, new Rectangle2D.Double(0, 0, 200, 100), null,
null);
g2.dispose();
//FIXME we should really assert a value here
}
| void function() { BufferedImage image = new BufferedImage(200 , 100, BufferedImage.TYPE_INT_RGB); Graphics2D g2 = image.createGraphics(); this.chart.draw(g2, new Rectangle2D.Double(0, 0, 200, 100), null, null); g2.dispose(); } | /**
* Draws the chart with a null info object to make sure that no exceptions
* are thrown (a problem that was occurring at one point).
*/ | Draws the chart with a null info object to make sure that no exceptions are thrown (a problem that was occurring at one point) | testDrawWithNullInfo | {
"repo_name": "oskopek/jfreechart-fse",
"path": "src/test/java/org/jfree/chart/StackedBarChartTest.java",
"license": "lgpl-2.1",
"size": 6680
} | [
"java.awt.Graphics2D",
"java.awt.geom.Rectangle2D",
"java.awt.image.BufferedImage"
] | import java.awt.Graphics2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; | import java.awt.*; import java.awt.geom.*; import java.awt.image.*; | [
"java.awt"
] | java.awt; | 1,974,982 |
private void flushInternal() throws IOException {
long toWaitFor;
synchronized (this) {
dfsClient.checkOpen();
checkClosed();
//
// If there is data in the current buffer, send it across
//
queueCurrentPacket();
toWaitFor = lastQueuedSeqno;
}
waitForAckedSeqno(toWaitFor);
} | void function() throws IOException { long toWaitFor; synchronized (this) { dfsClient.checkOpen(); checkClosed(); toWaitFor = lastQueuedSeqno; } waitForAckedSeqno(toWaitFor); } | /**
* Waits till all existing data is flushed and confirmations
* received from datanodes.
*/ | Waits till all existing data is flushed and confirmations received from datanodes | flushInternal | {
"repo_name": "tomatoKiller/Hadoop_Source_Learn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java",
"license": "apache-2.0",
"size": 72280
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 453,434 |
protected String normalizePropertyName(String propertyName) {
// TODO normalize the path (remove superfluous ".." and "."
// where possible)
if (query == null) {
return propertyName;
}
if (propertyName == null) {
return null;
}
int slash = propertyName.indexOf('/');
if (slash < 0) {
return normalizeNonRelativePropertyName(propertyName);
}
// relative properties
String relativePath = PathUtils.getParentPath(propertyName);
if (relativePath.indexOf('*') >= 0) {
StringBuilder buff = new StringBuilder();
for (String p : PathUtils.elements(relativePath)) {
if (!p.equals("*")) {
p = query.getOakPath(p);
}
if (p.length() > 0) {
if (buff.length() > 0) {
buff.append('/');
}
buff.append(p);
}
}
relativePath = buff.toString();
} else {
relativePath = query.getOakPath(relativePath);
}
propertyName = PathUtils.getName(propertyName);
propertyName = normalizeNonRelativePropertyName(propertyName);
return PathUtils.concat(relativePath, propertyName);
} | String function(String propertyName) { if (query == null) { return propertyName; } if (propertyName == null) { return null; } int slash = propertyName.indexOf('/'); if (slash < 0) { return normalizeNonRelativePropertyName(propertyName); } String relativePath = PathUtils.getParentPath(propertyName); if (relativePath.indexOf('*') >= 0) { StringBuilder buff = new StringBuilder(); for (String p : PathUtils.elements(relativePath)) { if (!p.equals("*")) { p = query.getOakPath(p); } if (p.length() > 0) { if (buff.length() > 0) { buff.append('/'); } buff.append(p); } } relativePath = buff.toString(); } else { relativePath = query.getOakPath(relativePath); } propertyName = PathUtils.getName(propertyName); propertyName = normalizeNonRelativePropertyName(propertyName); return PathUtils.concat(relativePath, propertyName); } | /**
* Normalize the property name (including namespace remapping).
* Asterisks are kept.
*
* @param propertyName the property name to normalize
* @return the normalized (oak-) property name
*/ | Normalize the property name (including namespace remapping). Asterisks are kept | normalizePropertyName | {
"repo_name": "mduerig/jackrabbit-oak",
"path": "oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElement.java",
"license": "apache-2.0",
"size": 5092
} | [
"org.apache.jackrabbit.oak.commons.PathUtils"
] | import org.apache.jackrabbit.oak.commons.PathUtils; | import org.apache.jackrabbit.oak.commons.*; | [
"org.apache.jackrabbit"
] | org.apache.jackrabbit; | 1,405,765 |
public synchronized <T extends Drawable> ArrayList<T> getDrawables(Class<T> c) {
ArrayList<T> list = super.getDrawables(c);
list.remove(plot);
return list;
} | synchronized <T extends Drawable> ArrayList<T> function(Class<T> c) { ArrayList<T> list = super.getDrawables(c); list.remove(plot); return list; } | /**
* Gets Drawable objects added by the user of an assignable type. The list contains
* objects that are assignable from the class or interface.
*
* @param c the type of Drawable object
*
* @return the cloned list
*
* @see #getObjectOfClass(Class c)
*/ | Gets Drawable objects added by the user of an assignable type. The list contains objects that are assignable from the class or interface | getDrawables | {
"repo_name": "fschuett/osp",
"path": "src/org/opensourcephysics/frames/Vector2DFrame.java",
"license": "gpl-3.0",
"size": 13078
} | [
"java.util.ArrayList",
"org.opensourcephysics.display.Drawable"
] | import java.util.ArrayList; import org.opensourcephysics.display.Drawable; | import java.util.*; import org.opensourcephysics.display.*; | [
"java.util",
"org.opensourcephysics.display"
] | java.util; org.opensourcephysics.display; | 1,166,119 |
public void setHorizontalAlignment(HorizontalAlignment alignment) {
if (alignment == null) {
throw new IllegalArgumentException("Null 'alignment' argument.");
}
if (this.horizontalAlignment != alignment) {
this.horizontalAlignment = alignment;
notifyListeners(new TitleChangeEvent(this));
}
}
| void function(HorizontalAlignment alignment) { if (alignment == null) { throw new IllegalArgumentException(STR); } if (this.horizontalAlignment != alignment) { this.horizontalAlignment = alignment; notifyListeners(new TitleChangeEvent(this)); } } | /**
* Sets the horizontal alignment for the title and sends a
* {@link TitleChangeEvent} to all registered listeners.
*
* @param alignment the horizontal alignment (<code>null</code> not
* permitted).
*/ | Sets the horizontal alignment for the title and sends a <code>TitleChangeEvent</code> to all registered listeners | setHorizontalAlignment | {
"repo_name": "nologic/nabs",
"path": "client/trunk/shared/libraries/jfreechart-1.0.5/source/org/jfree/chart/title/Title.java",
"license": "gpl-2.0",
"size": 15663
} | [
"org.jfree.chart.event.TitleChangeEvent",
"org.jfree.ui.HorizontalAlignment"
] | import org.jfree.chart.event.TitleChangeEvent; import org.jfree.ui.HorizontalAlignment; | import org.jfree.chart.event.*; import org.jfree.ui.*; | [
"org.jfree.chart",
"org.jfree.ui"
] | org.jfree.chart; org.jfree.ui; | 1,048,425 |
boolean collect(Term term, TermDocs tDocs) throws IOException;
}
private final static class NodeInfo {
final int docId;
final NodeId id;
NodeId parent;
public NodeInfo(int docId, NodeId id) {
this.docId = docId;
this.id = id;
}
} | boolean collect(Term term, TermDocs tDocs) throws IOException; } private final static class NodeInfo { final int docId; final NodeId id; NodeId parent; public NodeInfo(int docId, NodeId id) { this.docId = docId; this.id = id; } } | /**
* Called for each term encountered.
*
* @param term the term.
* @param tDocs the term docs of <code>term</code>.
* @return false if the collector does not wish to collect more TermDocs.
* @throws IOException if an error occurs while reading from the index.
*/ | Called for each term encountered | collect | {
"repo_name": "sdmcraft/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/CachingIndexReader.java",
"license": "apache-2.0",
"size": 27195
} | [
"java.io.IOException",
"org.apache.jackrabbit.core.id.NodeId",
"org.apache.lucene.index.Term",
"org.apache.lucene.index.TermDocs"
] | import java.io.IOException; import org.apache.jackrabbit.core.id.NodeId; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermDocs; | import java.io.*; import org.apache.jackrabbit.core.id.*; import org.apache.lucene.index.*; | [
"java.io",
"org.apache.jackrabbit",
"org.apache.lucene"
] | java.io; org.apache.jackrabbit; org.apache.lucene; | 799,869 |
public String search() {
return NavigationResults.SEARCH;
} | String function() { return NavigationResults.SEARCH; } | /**
* Go to search page
*
* @return forward to SEARCH page
*/ | Go to search page | search | {
"repo_name": "terrex/tntconcept-materials-testing",
"path": "src/main/java/com/autentia/intra/bean/account/AccountEntryTypeBean.java",
"license": "gpl-2.0",
"size": 17632
} | [
"com.autentia.intra.bean.NavigationResults"
] | import com.autentia.intra.bean.NavigationResults; | import com.autentia.intra.bean.*; | [
"com.autentia.intra"
] | com.autentia.intra; | 1,495,130 |
public T withConfigs(@Nullable final Set<String> configs) {
this.bConfigs.clear();
if (configs != null) {
this.bConfigs.addAll(configs);
}
return (T) this;
} | T function(@Nullable final Set<String> configs) { this.bConfigs.clear(); if (configs != null) { this.bConfigs.addAll(configs); } return (T) this; } | /**
* The configs to use with the resource if desired.
*
* @param configs The configuration file locations
* @return The builder
*/ | The configs to use with the resource if desired | withConfigs | {
"repo_name": "Netflix/genie",
"path": "genie-common/src/main/java/com/netflix/genie/common/dto/ExecutionEnvironmentDTO.java",
"license": "apache-2.0",
"size": 4462
} | [
"java.util.Set",
"javax.annotation.Nullable"
] | import java.util.Set; import javax.annotation.Nullable; | import java.util.*; import javax.annotation.*; | [
"java.util",
"javax.annotation"
] | java.util; javax.annotation; | 363,195 |
public static Symbolizer[] symbolizers(Rule rule) {
Set<Symbolizer> symbolizers = new HashSet<>();
symbolizers.addAll(rule.symbolizers());
if (symbolizers.isEmpty()) {
return new Symbolizer[0];
} else {
return symbolizers.toArray(new Symbolizer[0]);
}
} | static Symbolizer[] function(Rule rule) { Set<Symbolizer> symbolizers = new HashSet<>(); symbolizers.addAll(rule.symbolizers()); if (symbolizers.isEmpty()) { return new Symbolizer[0]; } else { return symbolizers.toArray(new Symbolizer[0]); } } | /**
* Retrieves all symbolizers defined in a rule
*
* @param rule the rule
* @return an array of unique symbolizers
*/ | Retrieves all symbolizers defined in a rule | symbolizers | {
"repo_name": "geotools/geotools",
"path": "modules/library/main/src/main/java/org/geotools/styling/SLD.java",
"license": "lgpl-2.1",
"size": 67749
} | [
"java.util.HashSet",
"java.util.Set"
] | import java.util.HashSet; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 2,664,280 |
@Test()
public void testSetCharacterArrayNonEmpty()
throws Exception
{
ByteStringBuffer buffer = new ByteStringBuffer().append("foo");
assertFalse(buffer.isEmpty());
assertEquals(buffer.length(), 3);
assertEquals(buffer.toString(), "foo");
buffer.set(new char[] { 'b', 'a', 'r' });
assertFalse(buffer.isEmpty());
assertEquals(buffer.length(), 3);
assertEquals(buffer.toString(), "bar");
buffer.hashCode();
} | @Test() void function() throws Exception { ByteStringBuffer buffer = new ByteStringBuffer().append("foo"); assertFalse(buffer.isEmpty()); assertEquals(buffer.length(), 3); assertEquals(buffer.toString(), "foo"); buffer.set(new char[] { 'b', 'a', 'r' }); assertFalse(buffer.isEmpty()); assertEquals(buffer.length(), 3); assertEquals(buffer.toString(), "bar"); buffer.hashCode(); } | /**
* Provides test coverage for the {@code set} method variant that takes a
* character array with a non-empty array.
*
* @throws Exception If an unexpected problem occurs.
*/ | Provides test coverage for the set method variant that takes a character array with a non-empty array | testSetCharacterArrayNonEmpty | {
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/util/ByteStringBufferTestCase.java",
"license": "gpl-2.0",
"size": 141047
} | [
"org.testng.annotations.Test"
] | import org.testng.annotations.Test; | import org.testng.annotations.*; | [
"org.testng.annotations"
] | org.testng.annotations; | 2,813,238 |
if(list.size() < 1) {
return null;
}
List<List<BigDecimal>> chunks = new ArrayList<List<BigDecimal>>();
for(int i = 0; i < list.size(); i += p) {
int end = i + p >= list.size() ? list.size(): i + p;
chunks.add(list.subList(i, end));
}
return chunks;
}; | if(list.size() < 1) { return null; } List<List<BigDecimal>> chunks = new ArrayList<List<BigDecimal>>(); for(int i = 0; i < list.size(); i += p) { int end = i + p >= list.size() ? list.size(): i + p; chunks.add(list.subList(i, end)); } return chunks; }; | /**
* Splits the list into chunks.
* @param List<BigDecimal> the list
* @param int the chunk size
* @param MathContext the math context
* @return the result
*/ | Splits the list into chunks | calculate | {
"repo_name": "jessemull/MicroFlex",
"path": "src/main/java/com/github/jessemull/microflex/bigdecimalflex/stat/ChunksBigDecimal.java",
"license": "apache-2.0",
"size": 5917
} | [
"java.math.BigDecimal",
"java.util.ArrayList",
"java.util.List"
] | import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; | import java.math.*; import java.util.*; | [
"java.math",
"java.util"
] | java.math; java.util; | 2,155,083 |
public void replacePageFragment(Fragment fragment, int index); | void function(Fragment fragment, int index); | /**
* Replace page fragment at specific index to new.
*
* @param fragment New page fragment
* @param index Replace index
*/ | Replace page fragment at specific index to new | replacePageFragment | {
"repo_name": "kakajika/MutablePagerAdapter",
"path": "lib/src/main/java/com/labo/kaji/mutablepageradapter/MutablePageControl.java",
"license": "mit",
"size": 1851
} | [
"android.support.v4.app.Fragment"
] | import android.support.v4.app.Fragment; | import android.support.v4.app.*; | [
"android.support"
] | android.support; | 2,058,985 |
try {
return getXml(new ByteArrayInputStream(xml.getBytes(encoding)), encoding);
} catch(UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
| try { return getXml(new ByteArrayInputStream(xml.getBytes(encoding)), encoding); } catch(UnsupportedEncodingException e) { throw new RuntimeException(e); } } | /**
* Parse an XML string as DOM.
*
* @param xml The serialized XML
* @param encoding the encoding to use
* @return The resulting XML document
*/ | Parse an XML string as DOM | getXml | {
"repo_name": "daisy/pipeline-issues",
"path": "clientlib/java/clientlib-java/src/main/java/org/daisy/pipeline/client/utils/XML.java",
"license": "apache-2.0",
"size": 4292
} | [
"java.io.ByteArrayInputStream",
"java.io.UnsupportedEncodingException"
] | import java.io.ByteArrayInputStream; import java.io.UnsupportedEncodingException; | import java.io.*; | [
"java.io"
] | java.io; | 1,625,432 |
public CreateSubCertResponse createClientCert(CreateSubCertRequest request, String clientToken) {
return createSubCert(request, clientToken, CertType.CLIENT);
} | CreateSubCertResponse function(CreateSubCertRequest request, String clientToken) { return createSubCert(request, clientToken, CertType.CLIENT); } | /**
* Create a client cert.
*
* @param request The request object containing all options for creating a client cert.
* @param clientToken A random string to make request idempotent.
* @return The created client cert ID.
*/ | Create a client cert | createClientCert | {
"repo_name": "baidubce/bce-sdk-java",
"path": "src/main/java/com/baidubce/services/iothisk/IotPkiManageClient.java",
"license": "apache-2.0",
"size": 23247
} | [
"com.baidubce.services.iothisk.model.CreateSubCertRequest",
"com.baidubce.services.iothisk.model.CreateSubCertResponse"
] | import com.baidubce.services.iothisk.model.CreateSubCertRequest; import com.baidubce.services.iothisk.model.CreateSubCertResponse; | import com.baidubce.services.iothisk.model.*; | [
"com.baidubce.services"
] | com.baidubce.services; | 924,237 |
@Override
public void println(boolean x) throws IOException {
print(x);
println();
}
| void function(boolean x) throws IOException { print(x); println(); } | /**
* Print a boolean value and then terminate the line. This method behaves
* as though it invokes <code>{@link #print(boolean)}</code> and then
* <code>{@link #println()}</code>.
*
* @throws IOException
*/ | Print a boolean value and then terminate the line. This method behaves as though it invokes <code><code>#print(boolean)</code></code> and then <code><code>#println()</code></code> | println | {
"repo_name": "pistolove/sourcecode4junit",
"path": "Source4Tomcat/src/org/apache/jasper/runtime/BodyContentImpl.java",
"license": "apache-2.0",
"size": 20825
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,534,901 |
private void override(TypeDescriptor other) throws BinaryObjectException {
assert clsName.equals(other.clsName);
if (canOverride) {
mapper = other.mapper;
serializer = other.serializer;
affKeyFieldName = other.affKeyFieldName;
isEnum = other.isEnum;
canOverride = other.canOverride;
}
else if (!other.canOverride)
throw new BinaryObjectException("Duplicate explicit class definition in configuration: " + clsName);
}
}
static class Type {
private final int id;
private final boolean registered;
public Type(int id, boolean registered) {
this.id = id;
this.registered = registered;
} | void function(TypeDescriptor other) throws BinaryObjectException { assert clsName.equals(other.clsName); if (canOverride) { mapper = other.mapper; serializer = other.serializer; affKeyFieldName = other.affKeyFieldName; isEnum = other.isEnum; canOverride = other.canOverride; } else if (!other.canOverride) throw new BinaryObjectException(STR + clsName); } } static class Type { private final int id; private final boolean registered; public Type(int id, boolean registered) { this.id = id; this.registered = registered; } | /**
* Override binary class descriptor.
*
* @param other Other descriptor.
* @throws BinaryObjectException If failed.
*/ | Override binary class descriptor | override | {
"repo_name": "agura/incubator-ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryContext.java",
"license": "apache-2.0",
"size": 41784
} | [
"org.apache.ignite.binary.BinaryObjectException"
] | import org.apache.ignite.binary.BinaryObjectException; | import org.apache.ignite.binary.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 2,832,743 |
void createMembers(Set<MemberRequest> requests) throws AmbariException;
// ----- Read ------------------------------------------------------------- | void createMembers(Set<MemberRequest> requests) throws AmbariException; | /**
* Creates members of the group.
*
* @param requests the request objects which define the members.
*
* @throws AmbariException when the members cannot be created.
*/ | Creates members of the group | createMembers | {
"repo_name": "radicalbit/ambari",
"path": "ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java",
"license": "apache-2.0",
"size": 30426
} | [
"java.util.Set",
"org.apache.ambari.server.AmbariException"
] | import java.util.Set; import org.apache.ambari.server.AmbariException; | import java.util.*; import org.apache.ambari.server.*; | [
"java.util",
"org.apache.ambari"
] | java.util; org.apache.ambari; | 935,827 |
public void verifyChecksumAlgorithm(ChecksumSpecTYPE checksumSpec, String collectionID)
throws RequestHandlerException{
if(checksumSpec == null) {
return;
}
// Validate against ChecksumPillar specific algorithm (if is a ChecksumPillar).
if(getChecksumPillarSpec() != null && !(getChecksumPillarSpec().equals(checksumSpec))) {
throw new InvalidMessageException(ResponseCode.REQUEST_NOT_SUPPORTED, "Cannot handle the checksum "
+ "specification '" + checksumSpec + "'.This checksum pillar can only handle '"
+ getChecksumPillarSpec() + "'");
}
try {
ChecksumUtils.verifyAlgorithm(checksumSpec);
} catch (NoSuchAlgorithmException e) {
throw new InvalidMessageException(ResponseCode.REQUEST_NOT_UNDERSTOOD_FAILURE, e.getMessage(), e);
}
} | void function(ChecksumSpecTYPE checksumSpec, String collectionID) throws RequestHandlerException{ if(checksumSpec == null) { return; } if(getChecksumPillarSpec() != null && !(getChecksumPillarSpec().equals(checksumSpec))) { throw new InvalidMessageException(ResponseCode.REQUEST_NOT_SUPPORTED, STR + STR + checksumSpec + STR + getChecksumPillarSpec() + "'"); } try { ChecksumUtils.verifyAlgorithm(checksumSpec); } catch (NoSuchAlgorithmException e) { throw new InvalidMessageException(ResponseCode.REQUEST_NOT_UNDERSTOOD_FAILURE, e.getMessage(), e); } } | /**
* Verifies the handling of a specific checksum algorithm.
*
* @param checksumSpec The checksum specification to verify.
* @param collectionID The ID for the collection.
* @throws RequestHandlerException If the checksum specification is not supported.
*/ | Verifies the handling of a specific checksum algorithm | verifyChecksumAlgorithm | {
"repo_name": "bitrepository/reference",
"path": "bitrepository-reference-pillar/src/main/java/org/bitrepository/pillar/store/StorageModel.java",
"license": "lgpl-2.1",
"size": 21409
} | [
"java.security.NoSuchAlgorithmException",
"org.bitrepository.bitrepositoryelements.ChecksumSpecTYPE",
"org.bitrepository.bitrepositoryelements.ResponseCode",
"org.bitrepository.common.utils.ChecksumUtils",
"org.bitrepository.service.exception.InvalidMessageException",
"org.bitrepository.service.exception.RequestHandlerException"
] | import java.security.NoSuchAlgorithmException; import org.bitrepository.bitrepositoryelements.ChecksumSpecTYPE; import org.bitrepository.bitrepositoryelements.ResponseCode; import org.bitrepository.common.utils.ChecksumUtils; import org.bitrepository.service.exception.InvalidMessageException; import org.bitrepository.service.exception.RequestHandlerException; | import java.security.*; import org.bitrepository.bitrepositoryelements.*; import org.bitrepository.common.utils.*; import org.bitrepository.service.exception.*; | [
"java.security",
"org.bitrepository.bitrepositoryelements",
"org.bitrepository.common",
"org.bitrepository.service"
] | java.security; org.bitrepository.bitrepositoryelements; org.bitrepository.common; org.bitrepository.service; | 720,434 |
Path path(); | Path path(); | /**
* Returns the path of the tunnel.
*
* @return the path of the tunnel
*/ | Returns the path of the tunnel | path | {
"repo_name": "planoAccess/clonedONOS",
"path": "incubator/api/src/main/java/org/onosproject/incubator/net/tunnel/TunnelDescription.java",
"license": "apache-2.0",
"size": 2037
} | [
"org.onosproject.net.Path"
] | import org.onosproject.net.Path; | import org.onosproject.net.*; | [
"org.onosproject.net"
] | org.onosproject.net; | 2,484,570 |
@Override
public Boolean visitNoScoping() {
if (processNoScope) {
cleanupActions.computeIfAbsent(injectee, i->{
LOGGER.debug("predestroy action registered for unscoped instance {} from {}", i, context);
return new UnscopedCleanupAction(context, lifecycleActions);
});
}
return true;
}
}
private static final class UnscopedCleanupAction implements LifecycleAction, Comparable<UnscopedCleanupAction> {
private volatile static long instanceCounter = 0;
private final long ordinal;
private final Object context;
private final Iterable<LifecycleAction> lifecycleActions;
public UnscopedCleanupAction(Object context, Iterable<LifecycleAction> lifecycleActions) {
this.context = context;
this.lifecycleActions = lifecycleActions;
this.ordinal = instanceCounter++;
} | Boolean function() { if (processNoScope) { cleanupActions.computeIfAbsent(injectee, i->{ LOGGER.debug(STR, i, context); return new UnscopedCleanupAction(context, lifecycleActions); }); } return true; } } private static final class UnscopedCleanupAction implements LifecycleAction, Comparable<UnscopedCleanupAction> { private volatile static long instanceCounter = 0; private final long ordinal; private final Object context; private final Iterable<LifecycleAction> lifecycleActions; public UnscopedCleanupAction(Object context, Iterable<LifecycleAction> lifecycleActions) { this.context = context; this.lifecycleActions = lifecycleActions; this.ordinal = instanceCounter++; } | /**
* handle unscoped instances here using a 'best effort' strategy to clean up these instances iff they
* still exist when the PreDestroyMonitor is closed.
*/ | handle unscoped instances here using a 'best effort' strategy to clean up these instances iff they still exist when the PreDestroyMonitor is closed | visitNoScoping | {
"repo_name": "Netflix/governator",
"path": "governator-core/src/main/java/com/netflix/governator/internal/PreDestroyMonitor.java",
"license": "apache-2.0",
"size": 16656
} | [
"com.netflix.governator.LifecycleAction"
] | import com.netflix.governator.LifecycleAction; | import com.netflix.governator.*; | [
"com.netflix.governator"
] | com.netflix.governator; | 917,433 |
void removeVetoableChangeListener(String name, VetoableChangeListener vcl); | void removeVetoableChangeListener(String name, VetoableChangeListener vcl); | /**
* Removes a <code>VetoableChangeListener</code> from this
* <code>BeanContextChild</code> so that it no longer receives
* events when the specified property changes.
* @param name the name of the property that was listened on.
* @param vcl the <code>VetoableChangeListener</code> to remove.
*/ | Removes a <code>VetoableChangeListener</code> from this <code>BeanContextChild</code> so that it no longer receives events when the specified property changes | removeVetoableChangeListener | {
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/java/beans/beancontext/BeanContextChild.java",
"license": "apache-2.0",
"size": 4476
} | [
"java.beans.VetoableChangeListener"
] | import java.beans.VetoableChangeListener; | import java.beans.*; | [
"java.beans"
] | java.beans; | 1,555,849 |
public void copyToBuffer(IntBuffer ib) {
ib.put(mBuffer, 0, mSize);
ib.rewind();
} | void function(IntBuffer ib) { ib.put(mBuffer, 0, mSize); ib.rewind(); } | /**
* Copies the contents of the list to the specified IntBuffer.
*
* @param ib
* target IntBuffer
*/ | Copies the contents of the list to the specified IntBuffer | copyToBuffer | {
"repo_name": "fabmax/LightGL",
"path": "LightGlLib/src/main/java/de/fabmax/lightgl/util/IntList.java",
"license": "apache-2.0",
"size": 5704
} | [
"java.nio.IntBuffer"
] | import java.nio.IntBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 1,468,255 |
return Dispatch.get(this, "Count").toInt();
} | return Dispatch.get(this, "Count").toInt(); } | /**
* Wrapper for calling the ActiveX-Method with input-parameter(s).
*
* @return the result is of type int
*/ | Wrapper for calling the ActiveX-Method with input-parameter(s) | getCount | {
"repo_name": "cpesch/MetaMusic",
"path": "itunes-com-library/src/main/java/slash/metamusic/itunes/com/binding/IITWindowCollection.java",
"license": "gpl-2.0",
"size": 2104
} | [
"com.jacob.com.Dispatch"
] | import com.jacob.com.Dispatch; | import com.jacob.com.*; | [
"com.jacob.com"
] | com.jacob.com; | 603,503 |
@Test
public void testResultSet() throws SQLException {
try {
conn = new DBConnection(connectionString);
stmt = conn.createStatement();
String query = "SELECT * FROM " + table1.getEscapedTableName();
rs = stmt.executeQuery(query);
// verify resultSet
rs.verify(table1);
}
finally {
terminateVariation();
}
} | void function() throws SQLException { try { conn = new DBConnection(connectionString); stmt = conn.createStatement(); String query = STR + table1.getEscapedTableName(); rs = stmt.executeQuery(query); rs.verify(table1); } finally { terminateVariation(); } } | /**
* Verify resultset using ResultSetMetaData
*
* @throws SQLException
*/ | Verify resultset using ResultSetMetaData | testResultSet | {
"repo_name": "v-nisidh/mssql-jdbc",
"path": "src/test/java/com/microsoft/sqlserver/jdbc/bvt/bvtTest.java",
"license": "mit",
"size": 18134
} | [
"com.microsoft.sqlserver.testframework.DBConnection",
"java.sql.SQLException"
] | import com.microsoft.sqlserver.testframework.DBConnection; import java.sql.SQLException; | import com.microsoft.sqlserver.testframework.*; import java.sql.*; | [
"com.microsoft.sqlserver",
"java.sql"
] | com.microsoft.sqlserver; java.sql; | 253,567 |
public Message getTitle(); | Message function(); | /**
* Gets the title of this inventory
*
* @return
*/ | Gets the title of this inventory | getTitle | {
"repo_name": "SanderGielisse/Enderstone",
"path": "src/org/enderstone/server/inventory/HalfInventory.java",
"license": "gpl-3.0",
"size": 3724
} | [
"org.enderstone.server.api.messages.Message"
] | import org.enderstone.server.api.messages.Message; | import org.enderstone.server.api.messages.*; | [
"org.enderstone.server"
] | org.enderstone.server; | 2,350,433 |
public static synchronized DefaultBandwidthMeter getSingletonInstance(Context context) {
if (singletonInstance == null) {
singletonInstance = new DefaultBandwidthMeter.Builder(context).build();
}
return singletonInstance;
}
private static final int ELAPSED_MILLIS_FOR_ESTIMATE = 2000;
private static final int BYTES_TRANSFERRED_FOR_ESTIMATE = 512 * 1024;
private final ImmutableMap<Integer, Long> initialBitrateEstimates;
private final EventDispatcher eventDispatcher;
private final SlidingPercentile slidingPercentile;
private final Clock clock;
private final boolean resetOnNetworkTypeChange;
private int streamCount;
private long sampleStartTimeMs;
private long sampleBytesTransferred;
private @C.NetworkType int networkType;
private long totalElapsedTimeMs;
private long totalBytesTransferred;
private long bitrateEstimate;
private long lastReportedBitrateEstimate;
private boolean networkTypeOverrideSet;
private @C.NetworkType int networkTypeOverride;
@Deprecated
public DefaultBandwidthMeter() {
this(
null,
ImmutableMap.of(),
DEFAULT_SLIDING_WINDOW_MAX_WEIGHT,
Clock.DEFAULT,
false);
}
private DefaultBandwidthMeter(
@Nullable Context context,
Map<Integer, Long> initialBitrateEstimates,
int maxWeight,
Clock clock,
boolean resetOnNetworkTypeChange) {
this.initialBitrateEstimates = ImmutableMap.copyOf(initialBitrateEstimates);
this.eventDispatcher = new EventDispatcher();
this.slidingPercentile = new SlidingPercentile(maxWeight);
this.clock = clock;
this.resetOnNetworkTypeChange = resetOnNetworkTypeChange;
if (context != null) {
NetworkTypeObserver networkTypeObserver = NetworkTypeObserver.getInstance(context);
networkType = networkTypeObserver.getNetworkType();
bitrateEstimate = getInitialBitrateEstimateForNetworkType(networkType);
networkTypeObserver.register( this::onNetworkTypeChanged);
} else {
networkType = C.NETWORK_TYPE_UNKNOWN;
bitrateEstimate = getInitialBitrateEstimateForNetworkType(C.NETWORK_TYPE_UNKNOWN);
}
} | static synchronized DefaultBandwidthMeter function(Context context) { if (singletonInstance == null) { singletonInstance = new DefaultBandwidthMeter.Builder(context).build(); } return singletonInstance; } private static final int ELAPSED_MILLIS_FOR_ESTIMATE = 2000; private static final int BYTES_TRANSFERRED_FOR_ESTIMATE = 512 * 1024; private final ImmutableMap<Integer, Long> initialBitrateEstimates; private final EventDispatcher eventDispatcher; private final SlidingPercentile slidingPercentile; private final Clock clock; private final boolean resetOnNetworkTypeChange; private int streamCount; private long sampleStartTimeMs; private long sampleBytesTransferred; private @C.NetworkType int networkType; private long totalElapsedTimeMs; private long totalBytesTransferred; private long bitrateEstimate; private long lastReportedBitrateEstimate; private boolean networkTypeOverrideSet; private @C.NetworkType int networkTypeOverride; public DefaultBandwidthMeter() { this( null, ImmutableMap.of(), DEFAULT_SLIDING_WINDOW_MAX_WEIGHT, Clock.DEFAULT, false); } private DefaultBandwidthMeter( @Nullable Context context, Map<Integer, Long> initialBitrateEstimates, int maxWeight, Clock clock, boolean resetOnNetworkTypeChange) { this.initialBitrateEstimates = ImmutableMap.copyOf(initialBitrateEstimates); this.eventDispatcher = new EventDispatcher(); this.slidingPercentile = new SlidingPercentile(maxWeight); this.clock = clock; this.resetOnNetworkTypeChange = resetOnNetworkTypeChange; if (context != null) { NetworkTypeObserver networkTypeObserver = NetworkTypeObserver.getInstance(context); networkType = networkTypeObserver.getNetworkType(); bitrateEstimate = getInitialBitrateEstimateForNetworkType(networkType); networkTypeObserver.register( this::onNetworkTypeChanged); } else { networkType = C.NETWORK_TYPE_UNKNOWN; bitrateEstimate = getInitialBitrateEstimateForNetworkType(C.NETWORK_TYPE_UNKNOWN); } } | /**
* Returns a singleton instance of a {@link DefaultBandwidthMeter} with default configuration.
*
* @param context A {@link Context}.
* @return The singleton instance.
*/ | Returns a singleton instance of a <code>DefaultBandwidthMeter</code> with default configuration | getSingletonInstance | {
"repo_name": "androidx/media",
"path": "libraries/exoplayer/src/main/java/androidx/media3/exoplayer/upstream/DefaultBandwidthMeter.java",
"license": "apache-2.0",
"size": 29601
} | [
"android.content.Context",
"androidx.annotation.Nullable",
"androidx.media3.common.util.Clock",
"androidx.media3.common.util.NetworkTypeObserver",
"androidx.media3.exoplayer.upstream.BandwidthMeter",
"com.google.common.collect.ImmutableMap",
"java.util.Map"
] | import android.content.Context; import androidx.annotation.Nullable; import androidx.media3.common.util.Clock; import androidx.media3.common.util.NetworkTypeObserver; import androidx.media3.exoplayer.upstream.BandwidthMeter; import com.google.common.collect.ImmutableMap; import java.util.Map; | import android.content.*; import androidx.annotation.*; import androidx.media3.common.util.*; import androidx.media3.exoplayer.upstream.*; import com.google.common.collect.*; import java.util.*; | [
"android.content",
"androidx.annotation",
"androidx.media3",
"com.google.common",
"java.util"
] | android.content; androidx.annotation; androidx.media3; com.google.common; java.util; | 337,279 |
public Editor edit() throws IOException {
return DiskLruCache.this.edit(key, sequenceNumber);
} | Editor function() throws IOException { return DiskLruCache.this.edit(key, sequenceNumber); } | /**
* Returns an editor for this snapshot's entry, or null if either the
* entry has changed since this snapshot was created or if another edit
* is in progress.
*/ | Returns an editor for this snapshot's entry, or null if either the entry has changed since this snapshot was created or if another edit is in progress | edit | {
"repo_name": "davols/android-light-dasftp",
"path": "app/src/main/java/com/github/davols/dasftp/DiskLruCache.java",
"license": "apache-2.0",
"size": 34386
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,154,054 |
public void hide() {
if (mAnchor == null) {
return;
}
try {
mAnchor.removeView(this);
topController.setVisibility(INVISIBLE);
mHandler.removeMessages(SHOW_PROGRESS);
} catch (IllegalArgumentException ex) {
Log.w("MediaController", "already removed");
}
mShowing = false;
} | void function() { if (mAnchor == null) { return; } try { mAnchor.removeView(this); topController.setVisibility(INVISIBLE); mHandler.removeMessages(SHOW_PROGRESS); } catch (IllegalArgumentException ex) { Log.w(STR, STR); } mShowing = false; } | /**
* Remove the controller from the screen.
*/ | Remove the controller from the screen | hide | {
"repo_name": "StrimBagZ/StrimBagZ",
"path": "app/src/main/java/net/lubot/strimbagzrewrite/util/VideoControllerView.java",
"license": "gpl-3.0",
"size": 27903
} | [
"android.util.Log"
] | import android.util.Log; | import android.util.*; | [
"android.util"
] | android.util; | 2,071,443 |
public double getRot() {
return rot;
}
// using rotating calipers approach as proposed by Toussaint, G. T (1983)
public ArbitrarilyOrientedBoundingBox(MIPoint2D[] displayCornerPoints) {
super();
MIPoint2D[] cvHull = ConvexHull.convex_hull(displayCornerPoints);
if (cvHull != null) {
SimpleMatrix pointMatrix = MIPoint2D.toSimpleMatrix(cvHull);
// first determining edges
Edge2D[] edges = getEdgesFromPoints(cvHull);
// then check bounding boxes that share a border with one of the edges
Boolean isFirstMinimum = true;
BoundingBox bestFittingBB = null;
Double bestFittingRot = 0d;
double minArea = 0;
for (int i = 0; i < edges.length; ++i) {
Edge2D curEdge = edges[i];
Double rot = curEdge.getAngle();
SimpleMatrix rotatedHull = rotatePoints(pointMatrix, rot);
BoundingBox bb = new BoundingBox(rotatedHull);
double curArea = bb.getArea();
if (curArea < minArea || isFirstMinimum) {
isFirstMinimum = false;
minArea = curArea;
bestFittingBB = bb;
bestFittingRot = -rot;
}
}
this.rot = bestFittingRot;
this.maxX = bestFittingBB.maxX;
this.maxY = bestFittingBB.maxY;
this.minX = bestFittingBB.minX;
this.minY = bestFittingBB.minY;
}
} | double function() { return rot; } public ArbitrarilyOrientedBoundingBox(MIPoint2D[] displayCornerPoints) { super(); MIPoint2D[] cvHull = ConvexHull.convex_hull(displayCornerPoints); if (cvHull != null) { SimpleMatrix pointMatrix = MIPoint2D.toSimpleMatrix(cvHull); Edge2D[] edges = getEdgesFromPoints(cvHull); Boolean isFirstMinimum = true; BoundingBox bestFittingBB = null; Double bestFittingRot = 0d; double minArea = 0; for (int i = 0; i < edges.length; ++i) { Edge2D curEdge = edges[i]; Double rot = curEdge.getAngle(); SimpleMatrix rotatedHull = rotatePoints(pointMatrix, rot); BoundingBox bb = new BoundingBox(rotatedHull); double curArea = bb.getArea(); if (curArea < minArea || isFirstMinimum) { isFirstMinimum = false; minArea = curArea; bestFittingBB = bb; bestFittingRot = -rot; } } this.rot = bestFittingRot; this.maxX = bestFittingBB.maxX; this.maxY = bestFittingBB.maxY; this.minX = bestFittingBB.minX; this.minY = bestFittingBB.minY; } } | /**
* Describes how an axis aligned bounding box would need to be
* rotated to be parallel to this oriented bounding box
*/ | Describes how an axis aligned bounding box would need to be rotated to be parallel to this oriented bounding box | getRot | {
"repo_name": "mcguenther/MIScreen",
"path": "MIScreen/app/src/main/java/de/volzo/miscreen/arbitraryBoundingBox/ArbitrarilyOrientedBoundingBox.java",
"license": "mit",
"size": 3108
} | [
"org.ejml.simple.SimpleMatrix"
] | import org.ejml.simple.SimpleMatrix; | import org.ejml.simple.*; | [
"org.ejml.simple"
] | org.ejml.simple; | 2,624,415 |
InputStream downloadResetPassword(ResetPasswordInput body); | InputStream downloadResetPassword(ResetPasswordInput body); | /**
* Download file for reset password of the sensor.
*
* @param body The reset password input.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/ | Download file for reset password of the sensor | downloadResetPassword | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/security/azure-resourcemanager-security/src/main/java/com/azure/resourcemanager/security/models/IotSensorsModel.java",
"license": "mit",
"size": 11258
} | [
"java.io.InputStream"
] | import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 770,411 |
@Override
public Object visit(final ConstructorInvocation node) {
Expression exp = node.getExpression();
if (exp == null) {
final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass();
final ClassInfo sdc = sc.getDeclaringClass();
final ClassInfo dc = ClassInfoCompiler.this.classInfo.getDeclaringClass();
if (dc != null && dc.equals(sdc) && !Modifier.isStatic(sc.getModifiers())) {
final List l = new LinkedList();
l.add(new Identifier("param$0"));
exp = new QualifiedName(l);
node.setExpression(exp);
} else if (sdc != null && sdc.equals(ClassInfoCompiler.this.classInfo.getAnonymousDeclaringClass()) && !Modifier.isStatic(sc.getModifiers())) {
final List l = new LinkedList();
l.add(new Identifier("param$0"));
exp = new QualifiedName(l);
node.setExpression(exp);
}
}
List args = node.getArguments();
if (exp != null) {
if (args == null) {
args = new LinkedList();
node.setArguments(args);
}
args.add(0, exp);
}
if (args != null) {
ListIterator it = args.listIterator();
while (it.hasNext()) {
final Object o = ((Expression) it.next()).acceptVisitor(this);
if (o != null) {
if (o instanceof Expression) {
it.set(o);
} else {
throw new ExecutionError("malformed.argument", node);
}
}
}
ConstructorInfo cons = null;
try {
ClassInfo[] params = null;
it = args.listIterator();
int i = 0;
params = new ClassInfo[args.size()];
while (it.hasNext()) {
params[i++] = NodeProperties.getClassInfo((Expression) it.next());
}
if (node.isSuper()) {
final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass();
cons = ClassInfoUtilities.lookupConstructor(sc, params);
this.superConstructor = sc.getName();
} else {
cons = ClassInfoUtilities.lookupConstructor(ClassInfoCompiler.this.classInfo, params);
this.superConstructor = ClassInfoCompiler.this.classInfo.getName();
}
} catch (final NoSuchMethodException e) {
throw new CatchedExceptionError(e, node);
}
final ClassInfo[] pt = cons.getParameterTypes();
this.constructorParameters = new String[pt.length];
for (int i = 0; i < pt.length; i++) {
this.constructorParameters[i] = pt[i].getName();
}
}
if (this.superConstructor == null) {
final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass();
this.superConstructor = sc.getName();
}
return null;
}
| Object function(final ConstructorInvocation node) { Expression exp = node.getExpression(); if (exp == null) { final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass(); final ClassInfo sdc = sc.getDeclaringClass(); final ClassInfo dc = ClassInfoCompiler.this.classInfo.getDeclaringClass(); if (dc != null && dc.equals(sdc) && !Modifier.isStatic(sc.getModifiers())) { final List l = new LinkedList(); l.add(new Identifier(STR)); exp = new QualifiedName(l); node.setExpression(exp); } else if (sdc != null && sdc.equals(ClassInfoCompiler.this.classInfo.getAnonymousDeclaringClass()) && !Modifier.isStatic(sc.getModifiers())) { final List l = new LinkedList(); l.add(new Identifier(STR)); exp = new QualifiedName(l); node.setExpression(exp); } } List args = node.getArguments(); if (exp != null) { if (args == null) { args = new LinkedList(); node.setArguments(args); } args.add(0, exp); } if (args != null) { ListIterator it = args.listIterator(); while (it.hasNext()) { final Object o = ((Expression) it.next()).acceptVisitor(this); if (o != null) { if (o instanceof Expression) { it.set(o); } else { throw new ExecutionError(STR, node); } } } ConstructorInfo cons = null; try { ClassInfo[] params = null; it = args.listIterator(); int i = 0; params = new ClassInfo[args.size()]; while (it.hasNext()) { params[i++] = NodeProperties.getClassInfo((Expression) it.next()); } if (node.isSuper()) { final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass(); cons = ClassInfoUtilities.lookupConstructor(sc, params); this.superConstructor = sc.getName(); } else { cons = ClassInfoUtilities.lookupConstructor(ClassInfoCompiler.this.classInfo, params); this.superConstructor = ClassInfoCompiler.this.classInfo.getName(); } } catch (final NoSuchMethodException e) { throw new CatchedExceptionError(e, node); } final ClassInfo[] pt = cons.getParameterTypes(); this.constructorParameters = new String[pt.length]; for (int i = 0; i < pt.length; i++) { this.constructorParameters[i] = pt[i].getName(); } } if (this.superConstructor == null) { final ClassInfo sc = ClassInfoCompiler.this.classInfo.getSuperclass(); this.superConstructor = sc.getName(); } return null; } | /**
* Visits a ConstructorInvocation
*
* @param node the node to visit
*/ | Visits a ConstructorInvocation | visit | {
"repo_name": "mbshopM/openconcerto",
"path": "OpenConcerto/src/koala/dynamicjava/interpreter/ClassInfoCompiler.java",
"license": "gpl-3.0",
"size": 77727
} | [
"java.lang.reflect.Modifier",
"java.util.LinkedList",
"java.util.List",
"java.util.ListIterator"
] | import java.lang.reflect.Modifier; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 1,163,630 |
public void prepareNormalDownload() throws IOException, ParseException {
fileHelper.prepareDownload(lastModifyFile(), file(), contentLength, lastModify);
} | void function() throws IOException, ParseException { fileHelper.prepareDownload(lastModifyFile(), file(), contentLength, lastModify); } | /**
* prepare normal download, create files and save last-modify.
*
* @throws IOException
* @throws ParseException
*/ | prepare normal download, create files and save last-modify | prepareNormalDownload | {
"repo_name": "weiwenqiang/GitHub",
"path": "Download/RxDownload-master/rxdownload2/src/main/java/zlc/season/rxdownload2/entity/TemporaryRecord.java",
"license": "apache-2.0",
"size": 8705
} | [
"java.io.IOException",
"java.text.ParseException"
] | import java.io.IOException; import java.text.ParseException; | import java.io.*; import java.text.*; | [
"java.io",
"java.text"
] | java.io; java.text; | 1,454,337 |
public boolean containsKey(String key, Charset charset)
{
return meta.containsKey(BinaryValue.unsafeCreate(key.getBytes(charset)));
}
/**
* Get a user metadata entry.
* <p>
* This method and its {@link RiakUserMetadata#put(java.lang.String, java.lang.String) }
* counterpart use the default {@code Charset} to convert the {@code String}s.
* </p>
*
* @param key the key for the user metadata entry as a {@code String} encoded using the default {@code Charset}
* @return the value for the entry converted to a {@code String} using the default {@code Charset} | boolean function(String key, Charset charset) { return meta.containsKey(BinaryValue.unsafeCreate(key.getBytes(charset))); } /** * Get a user metadata entry. * <p> * This method and its {@link RiakUserMetadata#put(java.lang.String, java.lang.String) } * counterpart use the default {@code Charset} to convert the {@code String}s. * </p> * * @param key the key for the user metadata entry as a {@code String} encoded using the default {@code Charset} * @return the value for the entry converted to a {@code String} using the default {@code Charset} | /**
* Determine if a specific usermeta entry is present.
* <p>
* This method uses the supplied {@code Charset} to convert the supplied key.
* </p>
*
* @param key the metadata key
* @return {@code true} if the entry is present, {@code false} otherwise.
*/ | Determine if a specific usermeta entry is present. This method uses the supplied Charset to convert the supplied key. | containsKey | {
"repo_name": "basho/riak-java-client",
"path": "src/main/java/com/basho/riak/client/core/query/UserMetadata/RiakUserMetadata.java",
"license": "apache-2.0",
"size": 8561
} | [
"com.basho.riak.client.core.util.BinaryValue",
"java.nio.charset.Charset"
] | import com.basho.riak.client.core.util.BinaryValue; import java.nio.charset.Charset; | import com.basho.riak.client.core.util.*; import java.nio.charset.*; | [
"com.basho.riak",
"java.nio"
] | com.basho.riak; java.nio; | 2,808,314 |
@Override
public GatewayOperationResponse beginSetSharedKey(String networkName, String localNetworkName, GatewaySetSharedKeyParameters parameters) throws ParserConfigurationException, SAXException, TransformerException, IOException, ServiceException {
// Validate
if (networkName == null) {
throw new NullPointerException("networkName");
}
if (localNetworkName == null) {
throw new NullPointerException("localNetworkName");
}
if (parameters == null) {
throw new NullPointerException("parameters");
}
// Tracing
boolean shouldTrace = CloudTracing.getIsEnabled();
String invocationId = null;
if (shouldTrace) {
invocationId = Long.toString(CloudTracing.getNextInvocationId());
HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
tracingParameters.put("networkName", networkName);
tracingParameters.put("localNetworkName", localNetworkName);
tracingParameters.put("parameters", parameters);
CloudTracing.enter(invocationId, this, "beginSetSharedKeyAsync", tracingParameters);
}
// Construct URL
String url = "";
url = url + "/";
if (this.getClient().getCredentials().getSubscriptionId() != null) {
url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8");
}
url = url + "/services/networking/";
url = url + URLEncoder.encode(networkName, "UTF-8");
url = url + "/gateway/connection/";
url = url + URLEncoder.encode(localNetworkName, "UTF-8");
url = url + "/sharedkey";
String baseUrl = this.getClient().getBaseUri().toString();
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl.charAt(baseUrl.length() - 1) == '/') {
baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0);
}
if (url.charAt(0) == '/') {
url = url.substring(1);
}
url = baseUrl + "/" + url;
url = url.replace(" ", "%20");
// Create HTTP transport objects
HttpPost httpRequest = new HttpPost(url);
// Set Headers
httpRequest.setHeader("Content-Type", "application/xml");
httpRequest.setHeader("x-ms-version", "2015-04-01");
// Serialize Request
String requestContent = null;
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document requestDoc = documentBuilder.newDocument();
Element sharedKeyElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "SharedKey");
requestDoc.appendChild(sharedKeyElement);
if (parameters.getValue() != null) {
Element valueElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "Value");
valueElement.appendChild(requestDoc.createTextNode(parameters.getValue()));
sharedKeyElement.appendChild(valueElement);
}
DOMSource domSource = new DOMSource(requestDoc);
StringWriter stringWriter = new StringWriter();
StreamResult streamResult = new StreamResult(stringWriter);
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
transformer.transform(domSource, streamResult);
requestContent = stringWriter.toString();
StringEntity entity = new StringEntity(requestContent);
httpRequest.setEntity(entity);
httpRequest.setHeader("Content-Type", "application/xml");
// Send Request
HttpResponse httpResponse = null;
try {
if (shouldTrace) {
CloudTracing.sendRequest(invocationId, httpRequest);
}
httpResponse = this.getClient().getHttpClient().execute(httpRequest);
if (shouldTrace) {
CloudTracing.receiveResponse(invocationId, httpResponse);
}
int statusCode = httpResponse.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_ACCEPTED) {
ServiceException ex = ServiceException.createFromXml(httpRequest, requestContent, httpResponse, httpResponse.getEntity());
if (shouldTrace) {
CloudTracing.error(invocationId, ex);
}
throw ex;
}
// Create Result
GatewayOperationResponse result = null;
// Deserialize Response
if (statusCode == HttpStatus.SC_ACCEPTED) {
InputStream responseContent = httpResponse.getEntity().getContent();
result = new GatewayOperationResponse();
DocumentBuilderFactory documentBuilderFactory2 = DocumentBuilderFactory.newInstance();
documentBuilderFactory2.setNamespaceAware(true);
DocumentBuilder documentBuilder2 = documentBuilderFactory2.newDocumentBuilder();
Document responseDoc = documentBuilder2.parse(new BOMInputStream(responseContent));
Element gatewayOperationAsyncResponseElement = XmlUtility.getElementByTagNameNS(responseDoc, "http://schemas.microsoft.com/windowsazure", "GatewayOperationAsyncResponse");
if (gatewayOperationAsyncResponseElement != null) {
Element idElement = XmlUtility.getElementByTagNameNS(gatewayOperationAsyncResponseElement, "http://schemas.microsoft.com/windowsazure", "ID");
if (idElement != null) {
String idInstance;
idInstance = idElement.getTextContent();
result.setOperationId(idInstance);
}
}
}
result.setStatusCode(statusCode);
if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
}
if (shouldTrace) {
CloudTracing.exit(invocationId, result);
}
return result;
} finally {
if (httpResponse != null && httpResponse.getEntity() != null) {
httpResponse.getEntity().getContent().close();
}
}
} | GatewayOperationResponse function(String networkName, String localNetworkName, GatewaySetSharedKeyParameters parameters) throws ParserConfigurationException, SAXException, TransformerException, IOException, ServiceException { if (networkName == null) { throw new NullPointerException(STR); } if (localNetworkName == null) { throw new NullPointerException(STR); } if (parameters == null) { throw new NullPointerException(STR); } boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put(STR, networkName); tracingParameters.put(STR, localNetworkName); tracingParameters.put(STR, parameters); CloudTracing.enter(invocationId, this, STR, tracingParameters); } String url = STR/STRUTF-8STR/services/networking/STRUTF-8STR/gateway/connection/STRUTF-8STR/sharedkeySTR/STR STR%20STRContent-TypeSTRapplication/xmlSTRx-ms-versionSTR2015-04-01STRhttp: requestDoc.appendChild(sharedKeyElement); if (parameters.getValue() != null) { Element valueElement = requestDoc.createElementNS(STRContent-TypeSTRapplication/xmlSTRhttp: if (gatewayOperationAsyncResponseElement != null) { Element idElement = XmlUtility.getElementByTagNameNS(gatewayOperationAsyncResponseElement, STRx-ms-request-idSTRx-ms-request-id").getValue()); } if (shouldTrace) { CloudTracing.exit(invocationId, result); } return result; } finally { if (httpResponse != null && httpResponse.getEntity() != null) { httpResponse.getEntity().getContent().close(); } } } | /**
* The Begin Set Virtual Network Gateway Shared Key operation sets the
* shared key on the virtual network gateway for the specified virtual
* network connection to the specified local network in Azure. (see
* http://msdn.microsoft.com/en-us/library/windowsazure/jj154114.aspx for
* more information)
*
* @param networkName Required. The name of the virtual network for this
* gateway.
* @param localNetworkName Required. The name of the local network.
* @param parameters Required. Parameters supplied to the Begin Virtual
* Network Gateway Set Shared Key request.
* @throws ParserConfigurationException Thrown if there was an error
* configuring the parser for the response body.
* @throws SAXException Thrown if there was an error parsing the response
* body.
* @throws TransformerException Thrown if there was an error creating the
* DOM transformer.
* @throws IOException Signals that an I/O exception of some sort has
* occurred. This class is the general class of exceptions produced by
* failed or interrupted I/O operations.
* @throws ServiceException Thrown if an unexpected response is found.
* @return A standard service response including an HTTP status code and
* request ID.
*/ | The Begin Set Virtual Network Gateway Shared Key operation sets the shared key on the virtual network gateway for the specified virtual network connection to the specified local network in Azure. (see HREF for more information) | beginSetSharedKey | {
"repo_name": "flydream2046/azure-sdk-for-java",
"path": "service-management/azure-svc-mgmt-network/src/main/java/com/microsoft/windowsazure/management/network/GatewayOperationsImpl.java",
"license": "apache-2.0",
"size": 573643
} | [
"com.microsoft.windowsazure.core.utils.XmlUtility",
"com.microsoft.windowsazure.exception.ServiceException",
"com.microsoft.windowsazure.management.network.models.GatewayOperationResponse",
"com.microsoft.windowsazure.management.network.models.GatewaySetSharedKeyParameters",
"com.microsoft.windowsazure.tracing.CloudTracing",
"java.io.IOException",
"java.util.HashMap",
"javax.xml.parsers.ParserConfigurationException",
"javax.xml.transform.TransformerException",
"org.w3c.dom.Element",
"org.xml.sax.SAXException"
] | import com.microsoft.windowsazure.core.utils.XmlUtility; import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.network.models.GatewayOperationResponse; import com.microsoft.windowsazure.management.network.models.GatewaySetSharedKeyParameters; import com.microsoft.windowsazure.tracing.CloudTracing; import java.io.IOException; import java.util.HashMap; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.w3c.dom.Element; import org.xml.sax.SAXException; | import com.microsoft.windowsazure.core.utils.*; import com.microsoft.windowsazure.exception.*; import com.microsoft.windowsazure.management.network.models.*; import com.microsoft.windowsazure.tracing.*; import java.io.*; import java.util.*; import javax.xml.parsers.*; import javax.xml.transform.*; import org.w3c.dom.*; import org.xml.sax.*; | [
"com.microsoft.windowsazure",
"java.io",
"java.util",
"javax.xml",
"org.w3c.dom",
"org.xml.sax"
] | com.microsoft.windowsazure; java.io; java.util; javax.xml; org.w3c.dom; org.xml.sax; | 1,499,574 |
protected EnumSyntax[] getEnumValueTable()
{
return enumValueTable;
} | EnumSyntax[] function() { return enumValueTable; } | /**
* Returns a table with the enumeration values for this object.
*
* @return The enumeration values.
*/ | Returns a table with the enumeration values for this object | getEnumValueTable | {
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/javax/print/attribute/standard/OrientationRequested.java",
"license": "gpl-2.0",
"size": 4924
} | [
"javax.print.attribute.EnumSyntax"
] | import javax.print.attribute.EnumSyntax; | import javax.print.attribute.*; | [
"javax.print"
] | javax.print; | 1,814,462 |
public void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
firePropertyChange(new PropertyChangeEvent(source, propertyName, oldValue, newValue));
} | void function(String propertyName, Object oldValue, Object newValue) { firePropertyChange(new PropertyChangeEvent(source, propertyName, oldValue, newValue)); } | /**
* Report a bound property update to any registered listeners. No event is
* fired if old and new are equal and non-null.
*
* @param propertyName The programmatic name of the property that was
* changed.
* @param oldValue The old value of the property.
* @param newValue The new value of the property.
*/ | Report a bound property update to any registered listeners. No event is fired if old and new are equal and non-null | firePropertyChange | {
"repo_name": "springrichclient/springrcp",
"path": "spring-richclient-core/src/main/java/org/springframework/binding/value/support/PropertyChangeSupport.java",
"license": "apache-2.0",
"size": 9967
} | [
"java.beans.PropertyChangeEvent"
] | import java.beans.PropertyChangeEvent; | import java.beans.*; | [
"java.beans"
] | java.beans; | 1,268,340 |
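A hedged illustration of the pattern documented above (not taken from the Spring sources): a minimal sketch using the JDK's java.beans.PropertyChangeSupport, which this helper mirrors; the Thermostat class and its property name are invented.

import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;

// Illustrative bean: fires an event whenever the (hypothetical) "target" property changes.
public class Thermostat {
    private final PropertyChangeSupport changes = new PropertyChangeSupport(this);
    private int target;

    public void addPropertyChangeListener(PropertyChangeListener listener) {
        changes.addPropertyChangeListener(listener);
    }

    public void setTarget(int newTarget) {
        int old = this.target;
        this.target = newTarget;
        // No event is delivered when the old and new values are equal.
        changes.firePropertyChange("target", old, newTarget);
    }
}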
public ArrayList<ProfileItem> getParentItems()
{
return _parentList;
} | ArrayList<ProfileItem> function() { return _parentList; } | /**
* Returns the parent items.
*/ | Returns the parent items | getParentItems | {
"repo_name": "dlitz/resin",
"path": "modules/quercus/src/com/caucho/quercus/profile/ProfileMethod.java",
"license": "gpl-2.0",
"size": 3942
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 76,601 |
public @Nullable TimeValue readOptionalTimeValue() throws IOException {
if (readBoolean()) {
return readTimeValue();
} else {
return null;
}
} | @Nullable TimeValue function() throws IOException { if (readBoolean()) { return readTimeValue(); } else { return null; } } | /**
* Read an optional {@link TimeValue} from the stream, returning null if no TimeValue was written.
*/ | Read an optional <code>TimeValue</code> from the stream, returning null if no TimeValue was written | readOptionalTimeValue | {
"repo_name": "crate/crate",
"path": "server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java",
"license": "apache-2.0",
"size": 44326
} | [
"io.crate.common.unit.TimeValue",
"java.io.IOException",
"javax.annotation.Nullable"
] | import io.crate.common.unit.TimeValue; import java.io.IOException; import javax.annotation.Nullable; | import io.crate.common.unit.*; import java.io.*; import javax.annotation.*; | [
"io.crate.common",
"java.io",
"javax.annotation"
] | io.crate.common; java.io; javax.annotation; | 2,627,648 |
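The optional-read pattern documented above (a boolean presence flag followed by the payload) has a symmetric write side. The CrateDB StreamOutput API is not shown in this record, so the sketch below illustrates the same idea with plain DataOutputStream/DataInputStream only; all names are illustrative.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the presence-flag pattern: write a boolean, then the value only if it is present.
public class OptionalValueStream {

    static void writeOptionalLong(DataOutputStream out, Long value) throws IOException {
        out.writeBoolean(value != null);
        if (value != null) {
            out.writeLong(value);
        }
    }

    static Long readOptionalLong(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readLong() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeOptionalLong(new DataOutputStream(bytes), 42L);
        Long roundTripped = readOptionalLong(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(roundTripped); // 42
    }
}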
EReference getACD_DirPhsB(); | EReference getACD_DirPhsB(); | /**
* Returns the meta object for the reference '{@link gluemodel.substationStandard.Dataclasses.ACD#getDirPhsB <em>Dir Phs B</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference '<em>Dir Phs B</em>'.
* @see gluemodel.substationStandard.Dataclasses.ACD#getDirPhsB()
* @see #getACD()
* @generated
*/ | Returns the meta object for the reference '<code>gluemodel.substationStandard.Dataclasses.ACD#getDirPhsB Dir Phs B</code>'. | getACD_DirPhsB | {
"repo_name": "georghinkel/ttc2017smartGrids",
"path": "solutions/eMoflon/rgse.ttc17.metamodels.src/src/gluemodel/substationStandard/Dataclasses/DataclassesPackage.java",
"license": "mit",
"size": 381891
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,297,649 |
public void addInterface(Class intf) {
Assert.notNull(intf, "Interface must not be null");
if (!intf.isInterface()) {
throw new IllegalArgumentException("[" + intf.getName() + "] is not an interface");
}
if (!this.interfaces.contains(intf)) {
this.interfaces.add(intf);
adviceChanged();
}
}
| void function(Class intf) { Assert.notNull(intf, STR); if (!intf.isInterface()) { throw new IllegalArgumentException("[" + intf.getName() + STR); } if (!this.interfaces.contains(intf)) { this.interfaces.add(intf); adviceChanged(); } } | /**
* Add a new proxied interface.
* @param intf the additional interface to proxy
*/ | Add a new proxied interface | addInterface | {
"repo_name": "codeApeFromChina/resource",
"path": "frame_packages/java_libs/spring-2.5.6-src/src/org/springframework/aop/framework/AdvisedSupport.java",
"license": "unlicense",
"size": 18632
} | [
"org.springframework.util.Assert"
] | import org.springframework.util.Assert; | import org.springframework.util.*; | [
"org.springframework.util"
] | org.springframework.util; | 1,023,146 |
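A hedged usage sketch for the method above: Spring's ProxyFactory extends AdvisedSupport, so addInterface can be exercised directly. This assumes spring-aop on the classpath; the target object and interface choice are illustrative only.

import java.util.ArrayList;
import java.util.List;
import org.springframework.aop.framework.ProxyFactory;

// Sketch: proxy an ArrayList behind the List interface.
public class AddInterfaceSketch {
    public static void main(String[] args) {
        ProxyFactory factory = new ProxyFactory(new ArrayList<String>());
        factory.addInterface(List.class); // a non-interface argument would raise IllegalArgumentException
        @SuppressWarnings("unchecked")
        List<String> proxy = (List<String>) factory.getProxy();
        proxy.add("hello"); // calls are forwarded to the target list
        System.out.println(proxy.size()); // 1
    }
}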
double getRating(Resource resource); | double getRating(Resource resource); | /**
* Get the overall rating for a resource.
* @param resource The content resource, this is usually an entry.
* @return The rating or {@code 0} if
* the passed in content resource is not part of
* Slingshot.
*/ | Get the overall rating for a resource | getRating | {
"repo_name": "trekawek/sling",
"path": "samples/slingshot/src/main/java/org/apache/sling/sample/slingshot/ratings/RatingsService.java",
"license": "apache-2.0",
"size": 2459
} | [
"org.apache.sling.api.resource.Resource"
] | import org.apache.sling.api.resource.Resource; | import org.apache.sling.api.resource.*; | [
"org.apache.sling"
] | org.apache.sling; | 1,149,872 |
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<EvaluatePoliciesResponseInner> evaluatePoliciesWithResponse(
String resourceGroupName,
String labName,
String name,
EvaluatePoliciesRequest evaluatePoliciesRequest,
Context context) {
return evaluatePoliciesWithResponseAsync(resourceGroupName, labName, name, evaluatePoliciesRequest, context)
.block();
} | @ServiceMethod(returns = ReturnType.SINGLE) Response<EvaluatePoliciesResponseInner> function( String resourceGroupName, String labName, String name, EvaluatePoliciesRequest evaluatePoliciesRequest, Context context) { return evaluatePoliciesWithResponseAsync(resourceGroupName, labName, name, evaluatePoliciesRequest, context) .block(); } | /**
* Evaluates lab policy.
*
* @param resourceGroupName The name of the resource group.
* @param labName The name of the lab.
* @param name The name of the policy set.
* @param evaluatePoliciesRequest Request body for evaluating a policy set.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response body for evaluating a policy set.
*/ | Evaluates lab policy | evaluatePoliciesWithResponse | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/devtestlabs/azure-resourcemanager-devtestlabs/src/main/java/com/azure/resourcemanager/devtestlabs/implementation/PolicySetsClientImpl.java",
"license": "mit",
"size": 12447
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context",
"com.azure.resourcemanager.devtestlabs.fluent.models.EvaluatePoliciesResponseInner",
"com.azure.resourcemanager.devtestlabs.models.EvaluatePoliciesRequest"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.resourcemanager.devtestlabs.fluent.models.EvaluatePoliciesResponseInner; import com.azure.resourcemanager.devtestlabs.models.EvaluatePoliciesRequest; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.devtestlabs.fluent.models.*; import com.azure.resourcemanager.devtestlabs.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,909,957 |
@Test(expected = IllegalArgumentException.class)
public void testConstructorRequiresExtensionLoader() throws Exception {
new MemoryMapArchiveImpl("test.jar", null);
} | @Test(expected = IllegalArgumentException.class) void function() throws Exception { new MemoryMapArchiveImpl(STR, null); } | /**
* Test to ensure the MemoryMapArchive requires a name
*
* @throws Exception
*/ | Test to ensure the MemoryMapArchive requires a name | testConstructorRequiresExtensionLoader | {
"repo_name": "chirino/shrinkwrap",
"path": "impl-base/src/test/java/org/jboss/shrinkwrap/impl/base/unit/MemoryMapArchiveTestCase.java",
"license": "apache-2.0",
"size": 3280
} | [
"org.jboss.shrinkwrap.impl.base.MemoryMapArchiveImpl",
"org.junit.Test"
] | import org.jboss.shrinkwrap.impl.base.MemoryMapArchiveImpl; import org.junit.Test; | import org.jboss.shrinkwrap.impl.base.*; import org.junit.*; | [
"org.jboss.shrinkwrap",
"org.junit"
] | org.jboss.shrinkwrap; org.junit; | 741,675 |
public Vector getCollDetailFKinPK()
{
return collDetailFKinPK;
}
| Vector function() { return collDetailFKinPK; } | /**
* Gets the collDetailFKinPK.
* @return Returns a java.util.Vector
*/ | Gets the collDetailFKinPK | getCollDetailFKinPK | {
"repo_name": "kuali/ojb-1.0.4",
"path": "src/test/org/apache/ojb/odmg/shared/Master.java",
"license": "apache-2.0",
"size": 2883
} | [
"java.util.Vector"
] | import java.util.Vector; | import java.util.*; | [
"java.util"
] | java.util; | 1,725,802 |
private static boolean confirmPerspectiveSwitch(IWorkbenchWindow window, IPerspectiveDescriptor finalPersp) {
IPreferenceStore store = IDEWorkbenchPlugin.getDefault().getPreferenceStore();
String pspm = store.getString(IDEInternalPreferences.PROJECT_SWITCH_PERSP_MODE);
if (!IDEInternalPreferences.PSPM_PROMPT.equals(pspm)) {
// Return whether or not we should always switch
return IDEInternalPreferences.PSPM_ALWAYS.equals(pspm);
}
String desc = finalPersp.getDescription();
String message;
if (desc == null || desc.length() == 0)
message = NLS.bind(ResourceMessages.NewProject_perspSwitchMessage, finalPersp.getLabel());
else
message = NLS.bind(ResourceMessages.NewProject_perspSwitchMessageWithDesc,
new String[] { finalPersp.getLabel(), desc });
MessageDialogWithToggle dialog = MessageDialogWithToggle.openYesNoQuestion(window.getShell(),
ResourceMessages.NewProject_perspSwitchTitle, message,
null ,
false , store, IDEInternalPreferences.PROJECT_SWITCH_PERSP_MODE);
int result = dialog.getReturnCode();
// If we are not going to prompt anymore propogate the choice.
if (dialog.getToggleState()) {
String preferenceValue;
if (result == IDialogConstants.YES_ID) {
// Doesn't matter if it is replace or new window
// as we are going to use the open perspective setting
preferenceValue = IWorkbenchPreferenceConstants.OPEN_PERSPECTIVE_REPLACE;
} else {
preferenceValue = IWorkbenchPreferenceConstants.NO_NEW_PERSPECTIVE;
}
// update PROJECT_OPEN_NEW_PERSPECTIVE to correspond
PrefUtil.getAPIPreferenceStore().setValue(IDE.Preferences.PROJECT_OPEN_NEW_PERSPECTIVE, preferenceValue);
}
return result == IDialogConstants.YES_ID;
}
| static boolean function(IWorkbenchWindow window, IPerspectiveDescriptor finalPersp) { IPreferenceStore store = IDEWorkbenchPlugin.getDefault().getPreferenceStore(); String pspm = store.getString(IDEInternalPreferences.PROJECT_SWITCH_PERSP_MODE); if (!IDEInternalPreferences.PSPM_PROMPT.equals(pspm)) { return IDEInternalPreferences.PSPM_ALWAYS.equals(pspm); } String desc = finalPersp.getDescription(); String message; if (desc == null desc.length() == 0) message = NLS.bind(ResourceMessages.NewProject_perspSwitchMessage, finalPersp.getLabel()); else message = NLS.bind(ResourceMessages.NewProject_perspSwitchMessageWithDesc, new String[] { finalPersp.getLabel(), desc }); MessageDialogWithToggle dialog = MessageDialogWithToggle.openYesNoQuestion(window.getShell(), ResourceMessages.NewProject_perspSwitchTitle, message, null , false , store, IDEInternalPreferences.PROJECT_SWITCH_PERSP_MODE); int result = dialog.getReturnCode(); if (dialog.getToggleState()) { String preferenceValue; if (result == IDialogConstants.YES_ID) { preferenceValue = IWorkbenchPreferenceConstants.OPEN_PERSPECTIVE_REPLACE; } else { preferenceValue = IWorkbenchPreferenceConstants.NO_NEW_PERSPECTIVE; } PrefUtil.getAPIPreferenceStore().setValue(IDE.Preferences.PROJECT_OPEN_NEW_PERSPECTIVE, preferenceValue); } return result == IDialogConstants.YES_ID; } | /**
* Prompts the user for whether to switch perspectives.
*
* @param window
* The workbench window in which to switch perspectives; must not
* be <code>null</code>
* @param finalPersp
* The perspective to switch to; must not be <code>null</code>.
*
* @return <code>true</code> if it's OK to switch, <code>false</code>
* otherwise
*/ | Prompts the user for whether to switch perspectives | confirmPerspectiveSwitch | {
"repo_name": "webratio/typescript.java",
"path": "eclipse/ts.eclipse.ide.ui/src/ts/eclipse/ide/ui/wizards/AbstractNewProjectWizard.java",
"license": "mit",
"size": 20306
} | [
"org.eclipse.jface.dialogs.IDialogConstants",
"org.eclipse.jface.dialogs.MessageDialogWithToggle",
"org.eclipse.jface.preference.IPreferenceStore",
"org.eclipse.osgi.util.NLS",
"org.eclipse.ui.IPerspectiveDescriptor",
"org.eclipse.ui.IWorkbenchPreferenceConstants",
"org.eclipse.ui.IWorkbenchWindow",
"org.eclipse.ui.internal.ide.IDEInternalPreferences",
"org.eclipse.ui.internal.ide.IDEWorkbenchPlugin",
"org.eclipse.ui.internal.util.PrefUtil",
"org.eclipse.ui.internal.wizards.newresource.ResourceMessages"
] | import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.MessageDialogWithToggle; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.osgi.util.NLS; import org.eclipse.ui.IPerspectiveDescriptor; import org.eclipse.ui.IWorkbenchPreferenceConstants; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.internal.ide.IDEInternalPreferences; import org.eclipse.ui.internal.ide.IDEWorkbenchPlugin; import org.eclipse.ui.internal.util.PrefUtil; import org.eclipse.ui.internal.wizards.newresource.ResourceMessages; | import org.eclipse.jface.dialogs.*; import org.eclipse.jface.preference.*; import org.eclipse.osgi.util.*; import org.eclipse.ui.*; import org.eclipse.ui.internal.ide.*; import org.eclipse.ui.internal.util.*; import org.eclipse.ui.internal.wizards.newresource.*; | [
"org.eclipse.jface",
"org.eclipse.osgi",
"org.eclipse.ui"
] | org.eclipse.jface; org.eclipse.osgi; org.eclipse.ui; | 303,644 |
public void getSequence(ByteArrayInputStream source, ByteArrayOutputStream sequence)
throws CodecException {
if (source.read() != SEQUENCE_TAG) {
throw new CodecException("Not a sequence");
}
getSegment(source, sequence);
} | void function(ByteArrayInputStream source, ByteArrayOutputStream sequence) throws CodecException { if (source.read() != SEQUENCE_TAG) { throw new CodecException(STR); } getSegment(source, sequence); } | /**
* Get an ASN.1 sequence.
*
* @param source
* @param sequence
* @return
* @throws CodecException
*/ | Get an ASN.1 sequence | getSequence | {
"repo_name": "swbrenneis/CryptoKitty",
"path": "src/org/cryptokitty/codec/DERCodec.java",
"license": "gpl-3.0",
"size": 6398
} | [
"java.io.ByteArrayInputStream",
"java.io.ByteArrayOutputStream",
"org.cryptokitty.exceptions.CodecException"
] | import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import org.cryptokitty.exceptions.CodecException; | import java.io.*; import org.cryptokitty.exceptions.*; | [
"java.io",
"org.cryptokitty.exceptions"
] | java.io; org.cryptokitty.exceptions; | 2,301,445 |
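To make the tag check above concrete, here is a self-contained sketch that hand-builds a tiny DER SEQUENCE and applies the same guard. The 0x30 tag value is standard DER, but the helper class below is an illustration, not part of the CryptoKitty codec.

import java.io.ByteArrayInputStream;

// Sketch only: a hand-built DER SEQUENCE (tag 0x30, length 3, containing INTEGER 5).
public class DerTagSketch {
    private static final int SEQUENCE_TAG = 0x30;

    public static void main(String[] args) {
        byte[] encoded = { 0x30, 0x03, 0x02, 0x01, 0x05 };
        ByteArrayInputStream in = new ByteArrayInputStream(encoded);
        if (in.read() != SEQUENCE_TAG) { // the same guard the codec applies
            throw new IllegalStateException("Not a sequence");
        }
        int length = in.read(); // short-form length octet
        System.out.println("sequence payload length = " + length); // 3
    }
}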
EReference getNumExpr__OrdinalOp_1(); | EReference getNumExpr__OrdinalOp_1(); | /**
* Returns the meta object for the containment reference list '{@link cruise.umple.umple.NumExpr_#getOrdinalOp_1 <em>Ordinal Op 1</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Ordinal Op 1</em>'.
* @see cruise.umple.umple.NumExpr_#getOrdinalOp_1()
* @see #getNumExpr_()
* @generated
*/ | Returns the meta object for the containment reference list '<code>cruise.umple.umple.NumExpr_#getOrdinalOp_1 Ordinal Op 1</code>'. | getNumExpr__OrdinalOp_1 | {
"repo_name": "ahmedvc/umple",
"path": "cruise.umple.xtext/src-gen/cruise/umple/umple/UmplePackage.java",
"license": "mit",
"size": 485842
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 392,515 |
public void testAckedMessageAreConsumed() throws JMSException {
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
Queue queue = session.createQueue("test");
MessageProducer producer = session.createProducer(queue);
producer.send(session.createTextMessage("Hello"));
// Consume the message...
MessageConsumer consumer = session.createConsumer(queue);
Message msg = consumer.receive(1000);
assertNotNull(msg);
// Reset the session.
session.close();
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
// Attempt to Consume the message...
consumer = session.createConsumer(queue);
msg = consumer.receive(1000);
assertNull(msg);
session.close();
} | void function() throws JMSException { connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); Queue queue = session.createQueue("test"); MessageProducer producer = session.createProducer(queue); producer.send(session.createTextMessage("Hello")); MessageConsumer consumer = session.createConsumer(queue); Message msg = consumer.receive(1000); assertNotNull(msg); session.close(); session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); consumer = session.createConsumer(queue); msg = consumer.receive(1000); assertNull(msg); session.close(); } | /**
 * Tests if acknowledged messages are being consumed.
*
* @throws javax.jms.JMSException
 */ | Tests if acknowledged messages are being consumed | testAckedMessageAreConsumed | {
"repo_name": "l-dobrev/activemq-artemis",
"path": "tests/activemq5-unit-tests/src/test/java/org/apache/activemq/JmsAutoAckTest.java",
"license": "apache-2.0",
"size": 2445
} | [
"javax.jms.JMSException",
"javax.jms.Message",
"javax.jms.MessageConsumer",
"javax.jms.MessageProducer",
"javax.jms.Queue",
"javax.jms.Session"
] | import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Queue; import javax.jms.Session; | import javax.jms.*; | [
"javax.jms"
] | javax.jms; | 1,205,308 |
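For contrast with the AUTO_ACKNOWLEDGE behaviour exercised by the test above, a hedged sketch of CLIENT_ACKNOWLEDGE mode, where the broker redelivers a message unless acknowledge() is called. Connection creation is assumed to happen elsewhere; the queue name and timeout are illustrative.

import javax.jms.Connection;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.Queue;
import javax.jms.Session;

// Sketch: with CLIENT_ACKNOWLEDGE the message stays outstanding until it is acknowledged.
public class ClientAckSketch {
    static void consumeOnce(Connection connection) throws Exception {
        connection.start();
        Session session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        Queue queue = session.createQueue("test");
        MessageConsumer consumer = session.createConsumer(queue);
        Message msg = consumer.receive(1000);
        if (msg != null) {
            msg.acknowledge(); // without this the broker would redeliver the message
        }
        session.close();
    }
}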
private double calculateAccruedInterest(IsdaPremiumLegSchedule premiumLegSchedule, LocalDate stepinDate) {
int n = premiumLegSchedule.getNumPayments();
// stepinDate is before first accStart or after last accEnd
if (!stepinDate.isAfter(premiumLegSchedule.getAccStartDate(0)) ||
!stepinDate.isBefore(premiumLegSchedule.getAccEndDate(n - 1))) {
return 0.0;
}
int index = premiumLegSchedule.getAccStartDateIndex(stepinDate);
if (index >= 0) {
return 0.0; // on accrual start date
}
index = -(index + 1); // binary search notation
if (index == 0) {
throw new MathException("Error in calculateAccruedInterest - check logic"); // this should never be hit
}
return _accuralDayCount.yearFraction(premiumLegSchedule.getAccStartDate(index - 1), stepinDate);
} | double function(IsdaPremiumLegSchedule premiumLegSchedule, LocalDate stepinDate) { int n = premiumLegSchedule.getNumPayments(); if (!stepinDate.isAfter(premiumLegSchedule.getAccStartDate(0)) !stepinDate.isBefore(premiumLegSchedule.getAccEndDate(n - 1))) { return 0.0; } int index = premiumLegSchedule.getAccStartDateIndex(stepinDate); if (index >= 0) { return 0.0; } index = -(index + 1); if (index == 0) { throw new MathException(STR); } return _accuralDayCount.yearFraction(premiumLegSchedule.getAccStartDate(index - 1), stepinDate); } | /**
* Calculate the accrued premium at the start of a trade.
*
* @param premiumLegSchedule
* @param stepinDate The trade effective date
* @return accrued premium
*/ | Calculate the accrued premium at the start of a trade | calculateAccruedInterest | {
"repo_name": "nssales/Strata",
"path": "modules/pricer/src/main/java/com/opengamma/strata/pricer/impl/credit/isda/IsdaCompliantPresentValueCreditDefaultSwap.java",
"license": "apache-2.0",
"size": 16016
} | [
"com.opengamma.analytics.math.MathException",
"java.time.LocalDate"
] | import com.opengamma.analytics.math.MathException; import java.time.LocalDate; | import com.opengamma.analytics.math.*; import java.time.*; | [
"com.opengamma.analytics",
"java.time"
] | com.opengamma.analytics; java.time; | 2,788,615 |
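The -(index + 1) step in the method above is the standard JDK binary-search insertion-point encoding. A self-contained illustration, with plain integers standing in for schedule dates:

import java.util.Arrays;

// Sketch: recover the insertion point the same way the accrual code does.
public class BinarySearchNotation {
    public static void main(String[] args) {
        int[] accrualStarts = { 0, 90, 180, 270 }; // stand-ins for accrual start dates
        int stepin = 100;                          // falls between 90 and 180
        int index = Arrays.binarySearch(accrualStarts, stepin);
        if (index < 0) {
            index = -(index + 1);                  // insertion point, here 2
        }
        // the accrual period in force started at the previous entry
        System.out.println("accrual period starts at " + accrualStarts[index - 1]); // 90
    }
}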
public static AOffset create(int[] indexes, int apos, int alen) {
final int maxValue = indexes[alen - 1];
if(maxValue < 0)
throw new DMLCompressionException("Invalid sizes given");
final int endLength = alen - apos;
final float avgDist = (float) maxValue / endLength;
if(avgDist < 256)
return new OffsetByte(indexes, apos, alen);
else
return new OffsetChar(indexes, apos, alen);
} | static AOffset function(int[] indexes, int apos, int alen) { final int maxValue = indexes[alen - 1]; if(maxValue < 0) throw new DMLCompressionException(STR); final int endLength = alen - apos; final float avgDist = (float) maxValue / endLength; if(avgDist < 256) return new OffsetByte(indexes, apos, alen); else return new OffsetChar(indexes, apos, alen); } | /**
 * Create an Offset based on a subset of the indexes given.
*
* This is useful if the input is created from a CSR matrix, since it allows us to not reallocate the indexes[] but
* use the shared indexes from the entire CSR representation.
*
* @param indexes The indexes from which to take the offsets.
* @param apos The position to start looking from in the indexes.
* @param alen The position to end looking at in the indexes.
* @return A new Offset.
 */ | Create an Offset based on a subset of the indexes given. This is useful if the input is created from a CSR matrix, since it allows us to not reallocate the indexes[] but use the shared indexes from the entire CSR representation | create | {
"repo_name": "apache/incubator-systemml",
"path": "src/main/java/org/apache/sysds/runtime/compress/colgroup/offset/OffsetFactory.java",
"license": "apache-2.0",
"size": 3921
} | [
"org.apache.sysds.runtime.compress.DMLCompressionException"
] | import org.apache.sysds.runtime.compress.DMLCompressionException; | import org.apache.sysds.runtime.compress.*; | [
"org.apache.sysds"
] | org.apache.sysds; | 1,699,308 |
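A worked illustration of the average-distance heuristic used above. This is plain arithmetic and does not depend on the SystemDS classes; the sample offsets are invented.

// Sketch: the average gap between offsets decides the encoding width.
public class OffsetWidthHeuristic {
    public static void main(String[] args) {
        int[] indexes = { 3, 250, 1000, 65000 };
        int apos = 0, alen = indexes.length;
        int maxValue = indexes[alen - 1];
        float avgDist = (float) maxValue / (alen - apos); // 65000 / 4 = 16250
        String choice = avgDist < 256 ? "byte-width offsets" : "char-width offsets";
        System.out.println("average distance " + avgDist + " -> " + choice);
    }
}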
public static long getLastNotificationTimeInMillis(Context context) {
String lastNotificationKey = context.getString(R.string.pref_last_notification);
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
long lastNotificationTime = sp.getLong(lastNotificationKey, 0);
return lastNotificationTime;
} | static long function(Context context) { String lastNotificationKey = context.getString(R.string.pref_last_notification); SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); long lastNotificationTime = sp.getLong(lastNotificationKey, 0); return lastNotificationTime; } | /**
* Returns the last time that a notification was shown (in UNIX time)
*
* @param context Used to access SharedPreferences
* @return UNIX time of when the last notification was shown
*/ | Returns the last time that a notification was shown (in UNIX time) | getLastNotificationTimeInMillis | {
"repo_name": "kuwatly/GoUbiquitous",
"path": "app/src/main/java/com/example/android/sunshine/data/SunshinePreferences.java",
"license": "apache-2.0",
"size": 11775
} | [
"android.content.Context",
"android.content.SharedPreferences",
"android.preference.PreferenceManager"
] | import android.content.Context; import android.content.SharedPreferences; import android.preference.PreferenceManager; | import android.content.*; import android.preference.*; | [
"android.content",
"android.preference"
] | android.content; android.preference; | 2,439,596 |
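A sketch of the write-side counterpart of the preference read above. The key is passed in rather than resolved from resources, and the class and method names are assumptions, not part of the Sunshine sources.

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;

// Sketch: persist the time of the last notification so the read shown above can find it.
public class NotificationPrefsSketch {
    public static void saveLastNotificationTime(Context context, String key, long timeInMillis) {
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
        sp.edit().putLong(key, timeInMillis).apply(); // asynchronous commit
    }
}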
public Table aggregate(String colName1, AggregateFunction<?, ?>... functions) {
ArrayListMultimap<String, AggregateFunction<?, ?>> columnFunctionMap =
ArrayListMultimap.create();
columnFunctionMap.putAll(colName1, Lists.newArrayList(functions));
return aggregate(columnFunctionMap);
} | Table function(String colName1, AggregateFunction<?, ?>... functions) { ArrayListMultimap<String, AggregateFunction<?, ?>> columnFunctionMap = ArrayListMultimap.create(); columnFunctionMap.putAll(colName1, Lists.newArrayList(functions)); return aggregate(columnFunctionMap); } | /**
* Applies the given aggregation to the given column. The apply and combine steps of a
* split-apply-combine.
*/ | Applies the given aggregation to the given column. The apply and combine steps of a split-apply-combine | aggregate | {
"repo_name": "axkr/symja_android_library",
"path": "symja_android_library/matheclipse-io/src/main/java/tech/tablesaw/table/TableSliceGroup.java",
"license": "gpl-3.0",
"size": 7152
} | [
"com.google.common.collect.ArrayListMultimap",
"com.google.common.collect.Lists",
"tech.tablesaw.aggregate.AggregateFunction",
"tech.tablesaw.api.Table"
] | import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Lists; import tech.tablesaw.aggregate.AggregateFunction; import tech.tablesaw.api.Table; | import com.google.common.collect.*; import tech.tablesaw.aggregate.*; import tech.tablesaw.api.*; | [
"com.google.common",
"tech.tablesaw.aggregate",
"tech.tablesaw.api"
] | com.google.common; tech.tablesaw.aggregate; tech.tablesaw.api; | 2,180,271 |
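A hedged usage sketch of the split-apply-combine flow documented above. It assumes Tablesaw's Table.splitOn(String...) entry point and the mean aggregate function; the table and column names are invented.

import static tech.tablesaw.aggregate.AggregateFunctions.mean;

import tech.tablesaw.api.DoubleColumn;
import tech.tablesaw.api.StringColumn;
import tech.tablesaw.api.Table;

// Sketch: split on "site", apply mean to "value", combine into a summary table.
public class SplitApplyCombineSketch {
    public static void main(String[] args) {
        Table t = Table.create("measurements",
                StringColumn.create("site", "A", "A", "B"),
                DoubleColumn.create("value", 1.0, 3.0, 5.0));
        Table summary = t.splitOn("site").aggregate("value", mean);
        System.out.println(summary);
    }
}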
private void writeObject(ObjectOutputStream stream)
throws IOException {
// Perform default writing first.
stream.defaultWriteObject();
// Write the separator type code.
stream.writeInt(separator.getTypeCode());
} | void function(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); stream.writeInt(separator.getTypeCode()); } | /**
* Overrides writeObject() to serialize the separator type.
*/ | Overrides writeObject() to serialize the separator type | writeObject | {
"repo_name": "arturog8m/ocs",
"path": "bundle/edu.gemini.pot/src/main/java/edu/gemini/spModel/target/system/CoordinateFormat.java",
"license": "bsd-3-clause",
"size": 6568
} | [
"java.io.IOException",
"java.io.ObjectOutputStream"
] | import java.io.IOException; import java.io.ObjectOutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,969,906 |
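Custom writeObject implementations like the one above are normally paired with a readObject that consumes the extra data in the same order. The CoordinateFormat internals are not shown in this record, so the class below is invented purely to illustrate the default-fields-plus-extra-value pattern.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Illustrative class: default serialization first, then one hand-written extra value.
public class TypeCodeHolder implements Serializable {
    private static final long serialVersionUID = 1L;
    private transient int typeCode = 7; // not covered by defaultWriteObject

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        out.writeInt(typeCode); // mirrors the record above
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        typeCode = in.readInt(); // must be read back in the order it was written
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bytes);
        oos.writeObject(new TypeCodeHolder());
        oos.flush();
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        TypeCodeHolder copy = (TypeCodeHolder) ois.readObject();
        System.out.println(copy.typeCode); // 7
    }
}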
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<DeletedSiteInner>> listSinglePageAsync() {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
return FluxUtil
.withContext(
context ->
service
.list(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
this.client.getApiVersion(),
context))
.<PagedResponse<DeletedSiteInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<DeletedSiteInner>> function() { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } return FluxUtil .withContext( context -> service .list( this.client.getEndpoint(), this.client.getSubscriptionId(), this.client.getApiVersion(), context)) .<PagedResponse<DeletedSiteInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()))); } | /**
* Description for Get all deleted apps for a subscription.
*
* @throws DefaultErrorResponseErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return collection of deleted apps.
*/ | Description for Get all deleted apps for a subscription | listSinglePageAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-appservice/src/main/java/com/azure/resourcemanager/appservice/implementation/DeletedWebAppsClientImpl.java",
"license": "mit",
"size": 30592
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedResponse",
"com.azure.core.http.rest.PagedResponseBase",
"com.azure.core.util.FluxUtil",
"com.azure.resourcemanager.appservice.fluent.models.DeletedSiteInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.FluxUtil; import com.azure.resourcemanager.appservice.fluent.models.DeletedSiteInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.appservice.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 319,202 |
IScreenshot getThumbnail(int maxW, int maxH); | IScreenshot getThumbnail(int maxW, int maxH); | /**
* Like {@code #getThumbnail()}, but limits the size of the returned image to the specified dimensions.
*
* @see #getThumbnail()
*/ | Like #getThumbnail(), but limits the size of the returned image to the specified dimensions | getThumbnail | {
"repo_name": "anonl/nvlist",
"path": "api/src/main/java/nl/weeaboo/vn/save/ISaveFile.java",
"license": "apache-2.0",
"size": 688
} | [
"nl.weeaboo.vn.image.IScreenshot"
] | import nl.weeaboo.vn.image.IScreenshot; | import nl.weeaboo.vn.image.*; | [
"nl.weeaboo.vn"
] | nl.weeaboo.vn; | 1,255,418 |
public static RuntimeMXBean getRuntimeMx() {
return ManagementFactory.getRuntimeMXBean();
} | static RuntimeMXBean function() { return ManagementFactory.getRuntimeMXBean(); } | /**
* Gets runtime MBean.
*
* @return Runtime MBean.
*/ | Gets runtime MBean | getRuntimeMx | {
"repo_name": "mcherkasov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java",
"license": "apache-2.0",
"size": 316648
} | [
"java.lang.management.ManagementFactory",
"java.lang.management.RuntimeMXBean"
] | import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; | import java.lang.management.*; | [
"java.lang"
] | java.lang; | 2,321,762 |
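A short usage sketch of the runtime MXBean returned by the helper above; these are standard java.lang.management calls.

import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;

// Sketch: typical read-only queries against the runtime MXBean.
public class RuntimeMxSketch {
    public static void main(String[] args) {
        RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
        System.out.println("JVM name:   " + runtime.getName()); // usually pid@host
        System.out.println("uptime ms:  " + runtime.getUptime());
        System.out.println("input args: " + runtime.getInputArguments());
    }
}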
public void setDayCount(DayCount dayCount) {
JodaBeanUtils.notNull(dayCount, "dayCount");
this._dayCount = dayCount;
} | void function(DayCount dayCount) { JodaBeanUtils.notNull(dayCount, STR); this._dayCount = dayCount; } | /**
* Sets the day count.
* @param dayCount the new value of the property, not null
*/ | Sets the day count | setDayCount | {
"repo_name": "McLeodMoores/starling",
"path": "projects/financial-types/src/main/java/com/opengamma/financial/security/bond/FloatingRateNoteSecurity.java",
"license": "apache-2.0",
"size": 33346
} | [
"com.opengamma.financial.convention.daycount.DayCount",
"org.joda.beans.JodaBeanUtils"
] | import com.opengamma.financial.convention.daycount.DayCount; import org.joda.beans.JodaBeanUtils; | import com.opengamma.financial.convention.daycount.*; import org.joda.beans.*; | [
"com.opengamma.financial",
"org.joda.beans"
] | com.opengamma.financial; org.joda.beans; | 1,250,573 |
static void compileArrayStoreBarrierLong(Assembler asm, BaselineCompilerImpl compiler) {
arayStoreBarrierHelper(asm, compiler, Entrypoints.longArrayWriteBarrierMethod);
} | static void compileArrayStoreBarrierLong(Assembler asm, BaselineCompilerImpl compiler) { arayStoreBarrierHelper(asm, compiler, Entrypoints.longArrayWriteBarrierMethod); } | /**
* Generate code to perform a lastore barrier. On entry the stack holds:
* arrayRef, index, value.
*
* @param asm the assembler to generate the code in
* @param compiler the compiler instance to ensure correct parameter passing
*/ | Generate code to perform a lastore barrier. On entry the stack holds: arrayRef, index, value | compileArrayStoreBarrierLong | {
"repo_name": "CodeOffloading/JikesRVM-CCO",
"path": "jikesrvm-3.1.3/rvm/src/org/jikesrvm/compilers/baseline/ia32/Barriers.java",
"license": "epl-1.0",
"size": 26452
} | [
"org.jikesrvm.ArchitectureSpecific",
"org.jikesrvm.runtime.Entrypoints"
] | import org.jikesrvm.ArchitectureSpecific; import org.jikesrvm.runtime.Entrypoints; | import org.jikesrvm.*; import org.jikesrvm.runtime.*; | [
"org.jikesrvm",
"org.jikesrvm.runtime"
] | org.jikesrvm; org.jikesrvm.runtime; | 418,595 |
public void printContext(PrintWriter out) {
out.println(getMessage());
out.print(context);
} | void function(PrintWriter out) { out.println(getMessage()); out.print(context); } | /**
* Prints the message and context.
*
* @param out {@code non-null;} where to print to
*/ | Prints the message and context | printContext | {
"repo_name": "janicduplessis/buck",
"path": "third-party/java/dx/src/com/android/dex/util/ExceptionWithContext.java",
"license": "apache-2.0",
"size": 4092
} | [
"java.io.PrintWriter"
] | import java.io.PrintWriter; | import java.io.*; | [
"java.io"
] | java.io; | 2,554,733 |
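A hedged caller-side sketch for the method above. The ExceptionWithContext(String) constructor and addContext(String) used here follow the dx sources but are assumptions as far as this record shows.

import com.android.dex.util.ExceptionWithContext;
import java.io.PrintWriter;

// Sketch: accumulate context while unwinding, then print message plus context together.
public class PrintContextSketch {
    public static void main(String[] args) {
        ExceptionWithContext ex = new ExceptionWithContext("bad opcode"); // assumed constructor
        ex.addContext("...while parsing method foo()V");                  // assumed helper
        ex.addContext("...while loading class Foo");
        PrintWriter out = new PrintWriter(System.err, true);
        ex.printContext(out); // message first, then the accumulated context lines
    }
}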
@Source("com/google/appinventor/images/iOSBookmarkBlack.png")
ImageResource bookIconBlack(); | @Source(STR) ImageResource bookIconBlack(); | /**
* Phone status bar containing black book icon for the iOS form in the visual designer
*/ | Phone status bar containing black book icon for the iOS form in the visual designer | bookIconBlack | {
"repo_name": "jisqyv/appinventor-sources",
"path": "appinventor/appengine/src/com/google/appinventor/client/Images.java",
"license": "apache-2.0",
"size": 19490
} | [
"com.google.gwt.resources.client.ImageResource"
] | import com.google.gwt.resources.client.ImageResource; | import com.google.gwt.resources.client.*; | [
"com.google.gwt"
] | com.google.gwt; | 1,697,570 |
public void setUserStatus(UserDTO user, UserStatusDTO status, Date today, Integer executorId);
| void function(UserDTO user, UserStatusDTO status, Date today, Integer executorId); | /**
 * Sets the user's status.
*
* @param user user
* @param status status to set
* @param today today's date
* @param executorId executor id
 */ | Sets the user's status | setUserStatus | {
"repo_name": "zoranh/SeavusJB3",
"path": "src/java/com/sapienter/jbilling/server/process/task/IAgeingTask.java",
"license": "agpl-3.0",
"size": 3956
} | [
"com.sapienter.jbilling.server.user.db.UserDTO",
"com.sapienter.jbilling.server.user.db.UserStatusDTO",
"java.util.Date"
] | import com.sapienter.jbilling.server.user.db.UserDTO; import com.sapienter.jbilling.server.user.db.UserStatusDTO; import java.util.Date; | import com.sapienter.jbilling.server.user.db.*; import java.util.*; | [
"com.sapienter.jbilling",
"java.util"
] | com.sapienter.jbilling; java.util; | 2,608,780 |
@Override
public AbstractFunctionHolder findExactFunction(FunctionCall functionCall, boolean
allowGandivaFunctions) {
FunctionResolver functionResolver = FunctionResolverFactory.getExactResolver(functionCall);
return getMatchingFunctionHolder(functionCall, functionResolver, allowGandivaFunctions);
} | AbstractFunctionHolder function(FunctionCall functionCall, boolean allowGandivaFunctions) { FunctionResolver functionResolver = FunctionResolverFactory.getExactResolver(functionCall); return getMatchingFunctionHolder(functionCall, functionResolver, allowGandivaFunctions); } | /**
 * Using the given <code>functionResolver</code>, find the Dremio function implementation for the given
 * <code>functionCall</code>.
*
* @param functionCall
* @param allowGandivaFunctions
* @return
 */ | Using the given <code>functionResolver</code>, find the Dremio function implementation for the given <code>functionCall</code> | findExactFunction | {
"repo_name": "dremio/dremio-oss",
"path": "sabot/kernel/src/main/java/com/dremio/exec/expr/fn/FunctionImplementationRegistry.java",
"license": "apache-2.0",
"size": 8857
} | [
"com.dremio.common.expression.FunctionCall",
"com.dremio.exec.resolver.FunctionResolver",
"com.dremio.exec.resolver.FunctionResolverFactory"
] | import com.dremio.common.expression.FunctionCall; import com.dremio.exec.resolver.FunctionResolver; import com.dremio.exec.resolver.FunctionResolverFactory; | import com.dremio.common.expression.*; import com.dremio.exec.resolver.*; | [
"com.dremio.common",
"com.dremio.exec"
] | com.dremio.common; com.dremio.exec; | 2,415,044 |
public void delete() {
final Map<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotException(shardId, "Failed to list content of gateway", e);
}
Tuple<BlobStoreIndexShardSnapshots, Integer> tuple = buildBlobStoreIndexShardSnapshots(blobs);
BlobStoreIndexShardSnapshots snapshots = tuple.v1();
int fileListGeneration = tuple.v2();
try {
indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getName());
} catch (IOException e) {
logger.debug("[{}] [{}] failed to delete shard snapshot file", shardId, snapshotId);
}
// Build a list of snapshots that should be preserved
List<SnapshotFiles> newSnapshotsList = new ArrayList<>();
for (SnapshotFiles point : snapshots) {
if (!point.snapshot().equals(snapshotId.getName())) {
newSnapshotsList.add(point);
}
}
// finalize the snapshot and rewrite the snapshot index with the next sequential snapshot index
finalize(newSnapshotsList, fileListGeneration + 1, blobs);
} | void function() { final Map<String, BlobMetaData> blobs; try { blobs = blobContainer.listBlobs(); } catch (IOException e) { throw new IndexShardSnapshotException(shardId, STR, e); } Tuple<BlobStoreIndexShardSnapshots, Integer> tuple = buildBlobStoreIndexShardSnapshots(blobs); BlobStoreIndexShardSnapshots snapshots = tuple.v1(); int fileListGeneration = tuple.v2(); try { indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getName()); } catch (IOException e) { logger.debug(STR, shardId, snapshotId); } List<SnapshotFiles> newSnapshotsList = new ArrayList<>(); for (SnapshotFiles point : snapshots) { if (!point.snapshot().equals(snapshotId.getName())) { newSnapshotsList.add(point); } } finalize(newSnapshotsList, fileListGeneration + 1, blobs); } | /**
* Delete shard snapshot
*/ | Delete shard snapshot | delete | {
"repo_name": "sreeramjayan/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java",
"license": "apache-2.0",
"size": 76516
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"org.elasticsearch.common.blobstore.BlobMetaData",
"org.elasticsearch.common.collect.Tuple",
"org.elasticsearch.index.snapshots.IndexShardSnapshotException",
"org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots",
"org.elasticsearch.index.snapshots.blobstore.SnapshotFiles"
] | import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.index.snapshots.IndexShardSnapshotException; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; import org.elasticsearch.index.snapshots.blobstore.SnapshotFiles; | import java.io.*; import java.util.*; import org.elasticsearch.common.blobstore.*; import org.elasticsearch.common.collect.*; import org.elasticsearch.index.snapshots.*; import org.elasticsearch.index.snapshots.blobstore.*; | [
"java.io",
"java.util",
"org.elasticsearch.common",
"org.elasticsearch.index"
] | java.io; java.util; org.elasticsearch.common; org.elasticsearch.index; | 1,458,969 |
private static SearchProgressDialog getSearchProgressDialog() {
if (searchProgressDialog == null) {
searchProgressDialog = new SearchProgressDialog(StaticSwingTools.getFirstParentFrame(
ComponentRegistry.getRegistry().getDescriptionPane()),
dscs);
}
return searchProgressDialog;
} | static SearchProgressDialog function() { if (searchProgressDialog == null) { searchProgressDialog = new SearchProgressDialog(StaticSwingTools.getFirstParentFrame( ComponentRegistry.getRegistry().getDescriptionPane()), dscs); } return searchProgressDialog; } | /**
 * Lazily creates the shared search progress dialog, parented to the frame that contains the description pane.
 *
 * @return the shared {@link SearchProgressDialog} instance
 */ | Lazily creates the shared search progress dialog, parented to the frame that contains the description pane | getSearchProgressDialog | {
"repo_name": "cismet/cids-navigator",
"path": "src/main/java/Sirius/navigator/search/CidsSearchExecutor.java",
"license": "gpl-3.0",
"size": 13548
} | [
"de.cismet.tools.gui.StaticSwingTools"
] | import de.cismet.tools.gui.StaticSwingTools; | import de.cismet.tools.gui.*; | [
"de.cismet.tools"
] | de.cismet.tools; | 2,047,973 |
public MeteorStats getMeteorStats(String test, String reference) {
// Normalize both
if (normalize) {
test = Normalizer.normalizeLine(test, langID, keepPunctuation);
reference = Normalizer.normalizeLine(reference, langID,
keepPunctuation);
}
// Lowercase both
if (lowerCase) {
test = test.toLowerCase();
reference = reference.toLowerCase();
}
// Score
Alignment alignment = aligner.align(test, reference);
return getMeteorStats(alignment);
} | MeteorStats function(String test, String reference) { if (normalize) { test = Normalizer.normalizeLine(test, langID, keepPunctuation); reference = Normalizer.normalizeLine(reference, langID, keepPunctuation); } if (lowerCase) { test = test.toLowerCase(); reference = reference.toLowerCase(); } Alignment alignment = aligner.align(test, reference); return getMeteorStats(alignment); } | /**
* Get the Meteor sufficient statistics for a test / reference pair
*
* @param test
* @param reference
* @return
*/ | Get the Meteor sufficient statistics for a test / reference pair | getMeteorStats | {
"repo_name": "cmu-mtlab/meteor",
"path": "src/edu/cmu/meteor/scorer/MeteorScorer.java",
"license": "lgpl-2.1",
"size": 13339
} | [
"edu.cmu.meteor.aligner.Alignment",
"edu.cmu.meteor.util.Normalizer"
] | import edu.cmu.meteor.aligner.Alignment; import edu.cmu.meteor.util.Normalizer; | import edu.cmu.meteor.aligner.*; import edu.cmu.meteor.util.*; | [
"edu.cmu.meteor"
] | edu.cmu.meteor; | 161,856 |
@Override
public void setExtra(byte[] extra) throws RuntimeException
{
try
{
ZipExtraField[] local = ExtraFieldUtils.parse(extra, true, ExtraFieldUtils.UnparseableExtraField.READ);
mergeExtraFields(local, true);
}
catch (ZipException e)
{
// actually this is not possible as of Ant 1.8.1
throw new RuntimeException("Error parsing extra fields for entry: " + getName() + " - " + e.getMessage(), e);
}
} | void function(byte[] extra) throws RuntimeException { try { ZipExtraField[] local = ExtraFieldUtils.parse(extra, true, ExtraFieldUtils.UnparseableExtraField.READ); mergeExtraFields(local, true); } catch (ZipException e) { throw new RuntimeException(STR + getName() + STR + e.getMessage(), e); } } | /**
* Parses the given bytes as extra field data and consumes any unparseable
* data as an {@link UnparseableExtraFieldData} instance.
*
* @param extra
* an array of bytes to be parsed into extra fields
 * @throws RuntimeException
 *             if the bytes cannot be parsed
 * @since 1.1
*/ | Parses the given bytes as extra field data and consumes any unparseable data as an <code>UnparseableExtraFieldData</code> instance | setExtra | {
"repo_name": "zyhndesign/JiangHomeStyle_Android_Phone",
"path": "src/org/apache/tools/zip/ZipEntry.java",
"license": "apache-2.0",
"size": 20140
} | [
"java.util.zip.ZipException"
] | import java.util.zip.ZipException; | import java.util.zip.*; | [
"java.util"
] | java.util; | 1,988,237 |
public void deleteRelationshipIndex(EntityMetadata entityMetadata, GraphDatabaseService graphDb,
Relationship relationship)
{
if (!isRelationshipAutoIndexingEnabled(graphDb) && entityMetadata.isIndexable())
{
Index<Relationship> relationshipIndex = graphDb.index().forRelationships(entityMetadata.getIndexName());
relationshipIndex.remove(relationship);
}
} | void function(EntityMetadata entityMetadata, GraphDatabaseService graphDb, Relationship relationship) { if (!isRelationshipAutoIndexingEnabled(graphDb) && entityMetadata.isIndexable()) { Index<Relationship> relationshipIndex = graphDb.index().forRelationships(entityMetadata.getIndexName()); relationshipIndex.remove(relationship); } } | /**
* Deletes a {@link Relationship} from manually created index if
* auto-indexing is disabled
*
* @param entityMetadata
* @param graphDb
* @param relationship
*/ | Deletes a <code>Relationship</code> from manually created index if auto-indexing is disabled | deleteRelationshipIndex | {
"repo_name": "ravisund/Kundera",
"path": "src/kundera-neo4j/src/main/java/com/impetus/client/neo4j/index/Neo4JIndexManager.java",
"license": "apache-2.0",
"size": 11151
} | [
"com.impetus.kundera.metadata.model.EntityMetadata",
"org.neo4j.graphdb.GraphDatabaseService",
"org.neo4j.graphdb.Relationship",
"org.neo4j.graphdb.index.Index"
] | import com.impetus.kundera.metadata.model.EntityMetadata; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Relationship; import org.neo4j.graphdb.index.Index; | import com.impetus.kundera.metadata.model.*; import org.neo4j.graphdb.*; import org.neo4j.graphdb.index.*; | [
"com.impetus.kundera",
"org.neo4j.graphdb"
] | com.impetus.kundera; org.neo4j.graphdb; | 1,763,915 |
@Override public void exitExprUnload(@NotNull QueryGrammarParser.ExprUnloadContext ctx) { } | @Override public void exitExprUnload(@NotNull QueryGrammarParser.ExprUnloadContext ctx) { } | /**
* {@inheritDoc}
* <p/>
* The default implementation does nothing.
 */ | The default implementation does nothing | exitExprUnload | {
"repo_name": "pmeisen/dis-timeintervaldataanalyzer",
"path": "src/net/meisen/dissertation/impl/parser/query/generated/QueryGrammarBaseListener.java",
"license": "bsd-3-clause",
"size": 33327
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 1,760,946 |
ListConfig findListConfig(String name); | ListConfig findListConfig(String name); | /**
* Finds existing List config.
*
* @param name name of the config
* @return List config or {@code null} when requested List configuration does not exist
*/ | Finds existing List config | findListConfig | {
"repo_name": "tufangorel/hazelcast",
"path": "hazelcast/src/main/java/com/hazelcast/internal/dynamicconfig/ConfigurationService.java",
"license": "apache-2.0",
"size": 13507
} | [
"com.hazelcast.config.ListConfig"
] | import com.hazelcast.config.ListConfig; | import com.hazelcast.config.*; | [
"com.hazelcast.config"
] | com.hazelcast.config; | 2,503,271 |
public Object getObject(Session session, Class clazz, String path) throws ObjectContentManagerException; | Object function(Session session, Class clazz, String path) throws ObjectContentManagerException; | /**
* Retrieve an object from the JCR repo
*
* @param session The JCR session
* @param clazz The class assigned to the object to retrieve
* @param path the JCR path
* @return The object found or <code>null</code> when it does not exist at <code>path</code>
*
* @throws ObjectContentManagerException when it is not possible to retrieve the object
*/ | Retrieve an object from the JCR repo | getObject | {
"repo_name": "apache/jackrabbit-ocm",
"path": "src/main/java/org/apache/jackrabbit/ocm/manager/objectconverter/ObjectConverter.java",
"license": "apache-2.0",
"size": 5750
} | [
"javax.jcr.Session",
"org.apache.jackrabbit.ocm.exception.ObjectContentManagerException"
] | import javax.jcr.Session; import org.apache.jackrabbit.ocm.exception.ObjectContentManagerException; | import javax.jcr.*; import org.apache.jackrabbit.ocm.exception.*; | [
"javax.jcr",
"org.apache.jackrabbit"
] | javax.jcr; org.apache.jackrabbit; | 579,889 |
this.system = ActorSystemFactory.buildActorSystem();
} | this.system = ActorSystemFactory.buildActorSystem(); } | /**
* Initializes the {@code system} with a concrete implementation before each test.
*/ | Initializes the system with a concrete implementation before each test | init | {
"repo_name": "drigoni/pcd-actors",
"path": "src/test/java/it/unipd/math/pcd/actors/ActorIT.java",
"license": "mit",
"size": 4327
} | [
"it.unipd.math.pcd.actors.utils.ActorSystemFactory"
] | import it.unipd.math.pcd.actors.utils.ActorSystemFactory; | import it.unipd.math.pcd.actors.utils.*; | [
"it.unipd.math"
] | it.unipd.math; | 1,499,256 |
public boolean authinfoSimple(String username, String password)
throws IOException
{
send(AUTHINFO_SIMPLE);
StatusResponse response = parseResponse(read());
switch (response.status)
{
case SEND_AUTHINFO_SIMPLE:
StringBuffer buffer = new StringBuffer(username);
buffer.append(' ');
buffer.append(password);
send(buffer.toString());
response = parseResponse(read());
switch (response.status)
{
case AUTHINFO_SIMPLE_OK:
return true;
case AUTHINFO_SIMPLE_DENIED:
return false;
default:throw new NNTPException(response);
}
default:
throw new NNTPException(response);
}
}
// RFC2980:3.1.3 AUTHINFO GENERIC | boolean function(String username, String password) throws IOException { send(AUTHINFO_SIMPLE); StatusResponse response = parseResponse(read()); switch (response.status) { case SEND_AUTHINFO_SIMPLE: StringBuffer buffer = new StringBuffer(username); buffer.append(' '); buffer.append(password); send(buffer.toString()); response = parseResponse(read()); switch (response.status) { case AUTHINFO_SIMPLE_OK: return true; case AUTHINFO_SIMPLE_DENIED: return false; default:throw new NNTPException(response); } default: throw new NNTPException(response); } } | /**
* Implementation of NNTP simple authentication.
* Note that use of this authentication strategy is highly deprecated,
* only use on servers that won't accept any other form of authentication.
*/ | Implementation of NNTP simple authentication. Note that use of this authentication strategy is highly deprecated, only use on servers that won't accept any other form of authentication | authinfoSimple | {
"repo_name": "SeekingFor/jfniki",
"path": "alien/src/gnu/inet/nntp/NNTPConnection.java",
"license": "gpl-2.0",
"size": 42972
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,708,296 |
private List<StageWrapper> buildHosts(UpgradeContext upgradeContext, List<String> hosts) {
if (CollectionUtils.isEmpty(hosts)) {
return Collections.emptyList();
}
Cluster cluster = upgradeContext.getCluster();
List<StageWrapper> wrappers = new ArrayList<>();
HostRoleCommandFactory hrcFactory = upgradeContext.getHostRoleCommandFactory();
// get a role command order instance that we can adjust for HOU since HOU
// may use a different ordering than normal start operations
RoleCommandOrder roleCommandOrder = getRoleCommandOrderForUpgrade(cluster);
for (String hostName : hosts) {
// initialize the collection for all stop tasks for every component on
// the host
List<TaskWrapper> stopTasks = new ArrayList<>();
// initialize the collection which will be passed into the RoleGraph for
// ordering
Map<String, Map<String, HostRoleCommand>> restartCommandsForHost = new HashMap<>();
Map<String, HostRoleCommand> restartCommandsByRole = new HashMap<>();
restartCommandsForHost.put(hostName, restartCommandsByRole);
// iterating over every host component, build the commands
for (ServiceComponentHost sch : cluster.getServiceComponentHosts(hostName)) {
if (!isVersionAdvertised(upgradeContext, sch)) {
continue;
}
HostsType hostsType = upgradeContext.getResolver().getMasterAndHosts(
sch.getServiceName(), sch.getServiceComponentName());
// !!! if the hosts do not contain the current one, that means the component
// either doesn't exist or the downgrade is to the current target version.
// hostsType better not be null either, but check anyway
if (null != hostsType && !hostsType.hosts.contains(hostName)) {
RepositoryVersionEntity targetRepositoryVersion = upgradeContext.getTargetRepositoryVersion(
sch.getServiceName());
LOG.warn("Host {} could not be orchestrated. Either there are no components for {}/{} " +
"or the target version {} is already current.",
hostName, sch.getServiceName(), sch.getServiceComponentName(),
targetRepositoryVersion.getVersion());
continue;
}
// create a STOP task for this host component
if (!sch.isClientComponent()) {
stopTasks.add(new TaskWrapper(sch.getServiceName(), sch.getServiceComponentName(),
Collections.singleton(hostName), new StopTask()));
}
// generate a placeholder HRC that can be used to generate the
// dependency graph - we must use START here since that's what the
// role command order is defined with - each of these will turn into a
// RESTART when we create the wrappers later on
Role role = Role.valueOf(sch.getServiceComponentName());
HostRoleCommand hostRoleCommand = hrcFactory.create(hostName, role, null,
RoleCommand.START);
// add the newly created HRC RESTART
restartCommandsByRole.put(role.name(), hostRoleCommand);
}
// short circuit and move to the next host if there are no commands
if (stopTasks.isEmpty() && restartCommandsByRole.isEmpty()) {
LOG.info("There were no {} commands generated for {}",
upgradeContext.getDirection().getText(false), hostName);
continue;
}
// now process the HRCs created so that we can create the appropriate
// stage/task wrappers for the RESTARTs
RoleGraphFactory roleGraphFactory = upgradeContext.getRoleGraphFactory();
RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder);
List<Map<String, List<HostRoleCommand>>> stages = roleGraph.getOrderedHostRoleCommands(
restartCommandsForHost);
// initialize the list of stage wrappers
List<StageWrapper> stageWrappers = new ArrayList<>();
// for every stage, create a stage wrapper around the tasks
int phaseCounter = 1;
for (Map<String, List<HostRoleCommand>> stage : stages) {
List<HostRoleCommand> stageCommandsForHost = stage.get(hostName);
String stageTitle = String.format("Starting components on %s (phase %d)", hostName,
phaseCounter++);
// create task wrappers
List<TaskWrapper> taskWrappers = new ArrayList<>();
for (HostRoleCommand command : stageCommandsForHost) {
StackId stackId = upgradeContext.getRepositoryVersion().getStackId();
String componentName = command.getRole().name();
String serviceName = null;
try {
AmbariMetaInfo ambariMetaInfo = upgradeContext.getAmbariMetaInfo();
serviceName = ambariMetaInfo.getComponentToService(stackId.getStackName(),
stackId.getStackVersion(), componentName);
} catch (AmbariException ambariException) {
LOG.error("Unable to lookup service by component {} for stack {}-{}", componentName,
stackId.getStackName(), stackId.getStackVersion());
}
TaskWrapper taskWrapper = new TaskWrapper(serviceName, componentName,
Collections.singleton(hostName), new RestartTask());
taskWrappers.add(taskWrapper);
}
if (!taskWrappers.isEmpty()) {
StageWrapper startWrapper = new StageWrapper(StageWrapper.Type.RESTART, stageTitle,
taskWrappers.toArray(new TaskWrapper[taskWrappers.size()]));
stageWrappers.add(startWrapper);
}
}
// create the manual task between the STOP and START stages
ManualTask mt = new ManualTask();
String message = String.format("Please acknowledge that host %s has been prepared.", hostName);
mt.messages.add(message);
JsonObject structuredOut = new JsonObject();
structuredOut.addProperty(TYPE, HostOrderItem.HostOrderActionType.HOST_UPGRADE.toString());
structuredOut.addProperty(HOST, hostName);
mt.structuredOut = structuredOut.toString();
// build the single STOP stage, but only if there are components to
// stop; client-only hosts have no components which need stopping
if (!stopTasks.isEmpty()) {
StageWrapper stopWrapper = new StageWrapper(StageWrapper.Type.STOP,
String.format("Stop on %s", hostName),
stopTasks.toArray(new TaskWrapper[stopTasks.size()]));
wrappers.add(stopWrapper);
}
StageWrapper manualWrapper = new StageWrapper(StageWrapper.Type.SERVER_SIDE_ACTION, "Manual Confirmation",
new TaskWrapper(null, null, Collections.emptySet(), mt));
wrappers.add(manualWrapper);
// !!! TODO install_packages for hdp and conf-select changes. Hopefully these will no-op.
wrappers.addAll(stageWrappers);
}
return wrappers;
} | List<StageWrapper> function(UpgradeContext upgradeContext, List<String> hosts) { if (CollectionUtils.isEmpty(hosts)) { return Collections.emptyList(); } Cluster cluster = upgradeContext.getCluster(); List<StageWrapper> wrappers = new ArrayList<>(); HostRoleCommandFactory hrcFactory = upgradeContext.getHostRoleCommandFactory(); RoleCommandOrder roleCommandOrder = getRoleCommandOrderForUpgrade(cluster); for (String hostName : hosts) { List<TaskWrapper> stopTasks = new ArrayList<>(); Map<String, Map<String, HostRoleCommand>> restartCommandsForHost = new HashMap<>(); Map<String, HostRoleCommand> restartCommandsByRole = new HashMap<>(); restartCommandsForHost.put(hostName, restartCommandsByRole); for (ServiceComponentHost sch : cluster.getServiceComponentHosts(hostName)) { if (!isVersionAdvertised(upgradeContext, sch)) { continue; } HostsType hostsType = upgradeContext.getResolver().getMasterAndHosts( sch.getServiceName(), sch.getServiceComponentName()); if (null != hostsType && !hostsType.hosts.contains(hostName)) { RepositoryVersionEntity targetRepositoryVersion = upgradeContext.getTargetRepositoryVersion( sch.getServiceName()); LOG.warn(STR + STR, hostName, sch.getServiceName(), sch.getServiceComponentName(), targetRepositoryVersion.getVersion()); continue; } if (!sch.isClientComponent()) { stopTasks.add(new TaskWrapper(sch.getServiceName(), sch.getServiceComponentName(), Collections.singleton(hostName), new StopTask())); } Role role = Role.valueOf(sch.getServiceComponentName()); HostRoleCommand hostRoleCommand = hrcFactory.create(hostName, role, null, RoleCommand.START); restartCommandsByRole.put(role.name(), hostRoleCommand); } if (stopTasks.isEmpty() && restartCommandsByRole.isEmpty()) { LOG.info(STR, upgradeContext.getDirection().getText(false), hostName); continue; } RoleGraphFactory roleGraphFactory = upgradeContext.getRoleGraphFactory(); RoleGraph roleGraph = roleGraphFactory.createNew(roleCommandOrder); List<Map<String, List<HostRoleCommand>>> stages = roleGraph.getOrderedHostRoleCommands( restartCommandsForHost); List<StageWrapper> stageWrappers = new ArrayList<>(); int phaseCounter = 1; for (Map<String, List<HostRoleCommand>> stage : stages) { List<HostRoleCommand> stageCommandsForHost = stage.get(hostName); String stageTitle = String.format(STR, hostName, phaseCounter++); List<TaskWrapper> taskWrappers = new ArrayList<>(); for (HostRoleCommand command : stageCommandsForHost) { StackId stackId = upgradeContext.getRepositoryVersion().getStackId(); String componentName = command.getRole().name(); String serviceName = null; try { AmbariMetaInfo ambariMetaInfo = upgradeContext.getAmbariMetaInfo(); serviceName = ambariMetaInfo.getComponentToService(stackId.getStackName(), stackId.getStackVersion(), componentName); } catch (AmbariException ambariException) { LOG.error(STR, componentName, stackId.getStackName(), stackId.getStackVersion()); } TaskWrapper taskWrapper = new TaskWrapper(serviceName, componentName, Collections.singleton(hostName), new RestartTask()); taskWrappers.add(taskWrapper); } if (!taskWrappers.isEmpty()) { StageWrapper startWrapper = new StageWrapper(StageWrapper.Type.RESTART, stageTitle, taskWrappers.toArray(new TaskWrapper[taskWrappers.size()])); stageWrappers.add(startWrapper); } } ManualTask mt = new ManualTask(); String message = String.format(STR, hostName); mt.messages.add(message); JsonObject structuredOut = new JsonObject(); structuredOut.addProperty(TYPE, HostOrderItem.HostOrderActionType.HOST_UPGRADE.toString()); structuredOut.addProperty(HOST, hostName); 
mt.structuredOut = structuredOut.toString(); if (!stopTasks.isEmpty()) { StageWrapper stopWrapper = new StageWrapper(StageWrapper.Type.STOP, String.format(STR, hostName), stopTasks.toArray(new TaskWrapper[stopTasks.size()])); wrappers.add(stopWrapper); } StageWrapper manualWrapper = new StageWrapper(StageWrapper.Type.SERVER_SIDE_ACTION, STR, new TaskWrapper(null, null, Collections.emptySet(), mt)); wrappers.add(manualWrapper); wrappers.addAll(stageWrappers); } return wrappers; } | /**
* Builds the stages for each host which typically consist of a STOP, a
* manual wait, and a START. The starting of components can be a single
* stage or may consist of several stages if the host components have
* dependencies on each other.
*
* @param upgradeContext
* the context
* @param hosts
* the list of hostnames
* @return the wrappers for a host
*/ | Builds the stages for each host which typically consist of a STOP, a manual wait, and a START. The starting of components can be a single stage or may consist of several stages if the host components have dependencies on each other | buildHosts | {
"repo_name": "radicalbit/ambari",
"path": "ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/HostOrderGrouping.java",
"license": "apache-2.0",
"size": 14866
} | [
"com.google.gson.JsonObject",
"java.util.ArrayList",
"java.util.Collections",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"org.apache.ambari.server.AmbariException",
"org.apache.ambari.server.Role",
"org.apache.ambari.server.RoleCommand",
"org.apache.ambari.server.actionmanager.HostRoleCommand",
"org.apache.ambari.server.actionmanager.HostRoleCommandFactory",
"org.apache.ambari.server.api.services.AmbariMetaInfo",
"org.apache.ambari.server.metadata.RoleCommandOrder",
"org.apache.ambari.server.orm.entities.RepositoryVersionEntity",
"org.apache.ambari.server.stack.HostsType",
"org.apache.ambari.server.stageplanner.RoleGraph",
"org.apache.ambari.server.stageplanner.RoleGraphFactory",
"org.apache.ambari.server.state.Cluster",
"org.apache.ambari.server.state.ServiceComponentHost",
"org.apache.ambari.server.state.StackId",
"org.apache.ambari.server.state.UpgradeContext",
"org.apache.commons.collections.CollectionUtils"
] | import com.google.gson.JsonObject; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.Role; import org.apache.ambari.server.RoleCommand; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.HostRoleCommandFactory; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.metadata.RoleCommandOrder; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.stack.HostsType; import org.apache.ambari.server.stageplanner.RoleGraph; import org.apache.ambari.server.stageplanner.RoleGraphFactory; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.ServiceComponentHost; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.UpgradeContext; import org.apache.commons.collections.CollectionUtils; | import com.google.gson.*; import java.util.*; import org.apache.ambari.server.*; import org.apache.ambari.server.actionmanager.*; import org.apache.ambari.server.api.services.*; import org.apache.ambari.server.metadata.*; import org.apache.ambari.server.orm.entities.*; import org.apache.ambari.server.stack.*; import org.apache.ambari.server.stageplanner.*; import org.apache.ambari.server.state.*; import org.apache.commons.collections.*; | [
"com.google.gson",
"java.util",
"org.apache.ambari",
"org.apache.commons"
] | com.google.gson; java.util; org.apache.ambari; org.apache.commons; | 1,969,186 |
@Override
public Enumeration<String> getAttributeNames()
{
HashMapImpl<String,Object> attributes = _attributes;
if (attributes != null) {
return Collections.enumeration(attributes.keySet());
}
else if (isSecure()) {
_attributes = new HashMapImpl<String,Object>();
attributes = _attributes;
_request.initAttributes(this);
return Collections.enumeration(attributes.keySet());
}
else
return NullEnumeration.create();
} | Enumeration<String> function() { HashMapImpl<String,Object> attributes = _attributes; if (attributes != null) { return Collections.enumeration(attributes.keySet()); } else if (isSecure()) { _attributes = new HashMapImpl<String,Object>(); attributes = _attributes; _request.initAttributes(this); return Collections.enumeration(attributes.keySet()); } else return NullEnumeration.create(); } | /**
* Returns an enumeration of the request attribute names.
*/ | Returns an enumeration of the request attribute names | getAttributeNames | {
"repo_name": "christianchristensen/resin",
"path": "modules/resin/src/com/caucho/server/http/HttpServletRequestImpl.java",
"license": "gpl-2.0",
"size": 55697
} | [
"com.caucho.util.HashMapImpl",
"com.caucho.util.NullEnumeration",
"java.util.Collections",
"java.util.Enumeration"
] | import com.caucho.util.HashMapImpl; import com.caucho.util.NullEnumeration; import java.util.Collections; import java.util.Enumeration; | import com.caucho.util.*; import java.util.*; | [
"com.caucho.util",
"java.util"
] | com.caucho.util; java.util; | 311,204 |
public static String getHistoryLink(CmsObject cms, CmsUUID structureId, String version) {
String resourcePath;
CmsResource resource;
try {
resource = cms.readResource(structureId, CmsResourceFilter.ALL);
resourcePath = resource.getRootPath();
} catch (CmsException e) {
throw new CmsRuntimeException(e.getMessageContainer(), e);
}
StringBuffer link = new StringBuffer();
link.append(CmsHistoryResourceHandler.HISTORY_HANDLER);
link.append(resourcePath);
link.append('?');
link.append(CmsHistoryResourceHandler.PARAM_VERSION);
link.append('=');
link.append(getVersion("" + version));
return link.toString();
}
| static String function(CmsObject cms, CmsUUID structureId, String version) { String resourcePath; CmsResource resource; try { resource = cms.readResource(structureId, CmsResourceFilter.ALL); resourcePath = resource.getRootPath(); } catch (CmsException e) { throw new CmsRuntimeException(e.getMessageContainer(), e); } StringBuffer link = new StringBuffer(); link.append(CmsHistoryResourceHandler.HISTORY_HANDLER); link.append(resourcePath); link.append('?'); link.append(CmsHistoryResourceHandler.PARAM_VERSION); link.append('='); link.append(getVersion("" + version)); return link.toString(); } | /**
* Returns the link to an historical file.<p>
*
* @param cms the cms context
* @param structureId the structure id of the file
* @param version the version number of the file
*
* @return the link to an historical file
*/ | Returns the link to an historical file | getHistoryLink | {
"repo_name": "comundus/opencms-comundus",
"path": "src/main/java/org/opencms/workplace/commons/CmsHistoryList.java",
"license": "lgpl-2.1",
"size": 35129
} | [
"org.opencms.file.CmsObject",
"org.opencms.file.CmsResource",
"org.opencms.file.CmsResourceFilter",
"org.opencms.file.history.CmsHistoryResourceHandler",
"org.opencms.main.CmsException",
"org.opencms.main.CmsRuntimeException",
"org.opencms.util.CmsUUID"
] | import org.opencms.file.CmsObject; import org.opencms.file.CmsResource; import org.opencms.file.CmsResourceFilter; import org.opencms.file.history.CmsHistoryResourceHandler; import org.opencms.main.CmsException; import org.opencms.main.CmsRuntimeException; import org.opencms.util.CmsUUID; | import org.opencms.file.*; import org.opencms.file.history.*; import org.opencms.main.*; import org.opencms.util.*; | [
"org.opencms.file",
"org.opencms.main",
"org.opencms.util"
] | org.opencms.file; org.opencms.main; org.opencms.util; | 236,892 |
public void injectBlocks(int dataNodeIndex, Iterable<Block> blocksToInject) throws IOException {
if (dataNodeIndex < 0 || dataNodeIndex > dataNodes.size()) {
throw new IndexOutOfBoundsException();
}
FSDatasetInterface dataSet = dataNodes.get(dataNodeIndex).datanode.getFSDataset();
if (!(dataSet instanceof SimulatedFSDataset)) {
throw new IOException("injectBlocks is valid only for SimilatedFSDataset");
}
String bpid = getNamesystem().getBlockPoolId();
SimulatedFSDataset sdataset = (SimulatedFSDataset) dataSet;
sdataset.injectBlocks(bpid, blocksToInject);
dataNodes.get(dataNodeIndex).datanode.scheduleAllBlockReport(0);
} | void function(int dataNodeIndex, Iterable<Block> blocksToInject) throws IOException { if (dataNodeIndex < 0 dataNodeIndex > dataNodes.size()) { throw new IndexOutOfBoundsException(); } FSDatasetInterface dataSet = dataNodes.get(dataNodeIndex).datanode.getFSDataset(); if (!(dataSet instanceof SimulatedFSDataset)) { throw new IOException(STR); } String bpid = getNamesystem().getBlockPoolId(); SimulatedFSDataset sdataset = (SimulatedFSDataset) dataSet; sdataset.injectBlocks(bpid, blocksToInject); dataNodes.get(dataNodeIndex).datanode.scheduleAllBlockReport(0); } | /**
* This method is valid only if the data nodes have simulated data
* @param dataNodeIndex - data node i which to inject - the index is same as for getDataNodes()
* @param blocksToInject - the blocks
* @throws IOException
* if not simulatedFSDataset
* if any of blocks already exist in the data node
*
*/ | This method is valid only if the data nodes have simulated data | injectBlocks | {
"repo_name": "moreus/hadoop",
"path": "hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 74425
} | [
"java.io.IOException",
"org.apache.hadoop.hdfs.protocol.Block",
"org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface",
"org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset"
] | import java.io.IOException; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface; import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset; | import java.io.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.datanode.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 88,307 |
public Object[] getCells(Rectangle rect) {
return getCells(rect, null);
} | Object[] function(Rectangle rect) { return getCells(rect, null); } | /**
* Returns the cells inside the given rectangle.
*
* @return Returns the cells inside the given rectangle.
*/ | Returns the cells inside the given rectangle | getCells | {
"repo_name": "ModelWriter/WP3",
"path": "Source/eu.modelwriter.visualization.jgrapx/src/com/mxgraph/swing/mxGraphComponent.java",
"license": "epl-1.0",
"size": 106155
} | [
"java.awt.Rectangle"
] | import java.awt.Rectangle; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,708,866 |
public Object[] borrowedBookToArray(Book book) {
return bookQueue.itemQueueToArray(book);
} | Object[] function(Book book) { return bookQueue.itemQueueToArray(book); } | /**
* Converts the corresponding member queue to array.
* @param book the book corresponding to a member queue.
* @return Object[]
*/ | Converts the corresponding member queue to array | borrowedBookToArray | {
"repo_name": "andela-kogunde/Library",
"path": "src/com/andela/library/Library.java",
"license": "mit",
"size": 4096
} | [
"com.andela.library.model.Book"
] | import com.andela.library.model.Book; | import com.andela.library.model.*; | [
"com.andela.library"
] | com.andela.library; | 2,579,502 |
@CheckReturnValue
default List<String> listStrings(String fieldName) {
return list(row -> row.getString(fieldName));
} | default List<String> listStrings(String fieldName) { return list(row -> row.getString(fieldName)); } | /**
* Executes <code>SELECT fieldName FROM ...</code> on the query and returns the result as a list
*/ | Executes <code>SELECT fieldName FROM ...</code> on the query and returns the result as a list | listStrings | {
"repo_name": "jhannes/fluent-jdbc",
"path": "src/main/java/org/fluentjdbc/DbContextListableSelect.java",
"license": "apache-2.0",
"size": 6107
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,353,903 |
protected void addOcciComputeCoresPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_M5_24xlarge_occiComputeCores_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_M5_24xlarge_occiComputeCores_feature", "_UI_M5_24xlarge_type"),
Ec2Package.eINSTANCE.getM5_24xlarge_OcciComputeCores(),
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
} | void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), Ec2Package.eINSTANCE.getM5_24xlarge_OcciComputeCores(), true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } | /**
* This adds a property descriptor for the Occi Compute Cores feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Occi Compute Cores feature. | addOcciComputeCoresPropertyDescriptor | {
"repo_name": "occiware/Multi-Cloud-Studio",
"path": "plugins/org.eclipse.cmf.occi.multicloud.aws.ec2.edit/src-gen/org/eclipse/cmf/occi/multicloud/aws/ec2/provider/M5_24xlargeItemProvider.java",
"license": "epl-1.0",
"size": 6095
} | [
"org.eclipse.cmf.occi.multicloud.aws.ec2.Ec2Package",
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor"
] | import org.eclipse.cmf.occi.multicloud.aws.ec2.Ec2Package; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; | import org.eclipse.cmf.occi.multicloud.aws.ec2.*; import org.eclipse.emf.edit.provider.*; | [
"org.eclipse.cmf",
"org.eclipse.emf"
] | org.eclipse.cmf; org.eclipse.emf; | 1,487,529 |
@Test
public void testStartTimer() throws InterruptedException {
final TimedSemaphoreTestImpl semaphore = new TimedSemaphoreTestImpl(PERIOD,
UNIT, LIMIT);
final ScheduledFuture<?> future = semaphore.startTimer();
assertNotNull("No future returned", future);
Thread.sleep(PERIOD);
final int trials = 10;
int count = 0;
do {
Thread.sleep(PERIOD);
if (count++ > trials) {
fail("endOfPeriod() not called!");
}
} while (semaphore.getPeriodEnds() <= 0);
semaphore.shutdown();
} | void function() throws InterruptedException { final TimedSemaphoreTestImpl semaphore = new TimedSemaphoreTestImpl(PERIOD, UNIT, LIMIT); final ScheduledFuture<?> future = semaphore.startTimer(); assertNotNull(STR, future); Thread.sleep(PERIOD); final int trials = 10; int count = 0; do { Thread.sleep(PERIOD); if (count++ > trials) { fail(STR); } } while (semaphore.getPeriodEnds() <= 0); semaphore.shutdown(); } | /**
* Tests starting the timer.
*
* @throws java.lang.InterruptedException so we don't have to catch it
*/ | Tests starting the timer | testStartTimer | {
"repo_name": "xiwc/commons-lang",
"path": "src/test/java/org/apache/commons/lang3/concurrent/TimedSemaphoreTest.java",
"license": "apache-2.0",
"size": 19163
} | [
"java.util.concurrent.ScheduledFuture",
"org.junit.Assert"
] | import java.util.concurrent.ScheduledFuture; import org.junit.Assert; | import java.util.concurrent.*; import org.junit.*; | [
"java.util",
"org.junit"
] | java.util; org.junit; | 1,737,899 |
protected Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)
throws IOException, CorruptedLogFileException {
Path path = file.getPath();
long length = file.getLen();
Reader in;
// Check for possibly empty file. With appends, currently Hadoop reports a
// zero length even if the file has been sync'd. Revisit if HDFS-376 or
// HDFS-878 is committed.
if (length <= 0) {
LOG.warn("File " + path + " might be still open, length is 0");
}
try {
FSUtils.getInstance(fs, conf).recoverFileLease(fs, path, conf, reporter);
try {
in = getReader(path, reporter);
} catch (EOFException e) {
if (length <= 0) {
// TODO should we ignore an empty, not-last log file if skip.errors
// is false? Either way, the caller should decide what to do. E.g.
// ignore if this is the last log in sequence.
// TODO is this scenario still possible if the log has been
// recovered (i.e. closed)
LOG.warn("Could not open " + path + " for reading. File is empty", e);
return null;
} else {
// EOFException being ignored
return null;
}
}
} catch (IOException e) {
if (e instanceof FileNotFoundException) {
// A wal file may not exist anymore. Nothing can be recovered so move on
LOG.warn("File " + path + " doesn't exist anymore.", e);
return null;
}
if (!skipErrors || e instanceof InterruptedIOException) {
throw e; // Don't mark the file corrupted if interrupted, or not skipErrors
}
CorruptedLogFileException t =
new CorruptedLogFileException("skipErrors=true Could not open wal " +
path + " ignoring");
t.initCause(e);
throw t;
}
return in;
} | Reader function(FileStatus file, boolean skipErrors, CancelableProgressable reporter) throws IOException, CorruptedLogFileException { Path path = file.getPath(); long length = file.getLen(); Reader in; if (length <= 0) { LOG.warn(STR + path + STR); } try { FSUtils.getInstance(fs, conf).recoverFileLease(fs, path, conf, reporter); try { in = getReader(path, reporter); } catch (EOFException e) { if (length <= 0) { LOG.warn(STR + path + STR, e); return null; } else { return null; } } } catch (IOException e) { if (e instanceof FileNotFoundException) { LOG.warn(STR + path + STR, e); return null; } if (!skipErrors e instanceof InterruptedIOException) { throw e; } CorruptedLogFileException t = new CorruptedLogFileException(STR + path + STR); t.initCause(e); throw t; } return in; } | /**
* Create a new {@link Reader} for reading logs to split.
*
* @param file
* @return A new Reader instance, caller should close
* @throws IOException
* @throws CorruptedLogFileException
*/ | Create a new <code>Reader</code> for reading logs to split | getReader | {
"repo_name": "toshimasa-nasu/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java",
"license": "apache-2.0",
"size": 82681
} | [
"java.io.EOFException",
"java.io.FileNotFoundException",
"java.io.IOException",
"java.io.InterruptedIOException",
"org.apache.hadoop.fs.FileStatus",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.util.CancelableProgressable",
"org.apache.hadoop.hbase.util.FSUtils",
"org.apache.hadoop.hbase.wal.WAL"
] | import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.WAL; | import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.hbase.wal.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,953,163 |
@Test
public void checkMultipleQueriesSameTransaction() {
long originalScore = 0; // default value for new database.
long oldScore = dbi.updateScore("u1", 1);
assertEquals(originalScore, oldScore);
User user = dbi.getUserByLogin("u1");
assertNotNull(user);
assertEquals(1L, user.score);
} | void function() { long originalScore = 0; long oldScore = dbi.updateScore("u1", 1); assertEquals(originalScore, oldScore); User user = dbi.getUserByLogin("u1"); assertNotNull(user); assertEquals(1L, user.score); } | /**
* SampleDbi.updateScore() method internally uses 2 raw queries in the same transaction.
*/ | SampleDbi.updateScore() method internally uses 2 raw queries in the same transaction | checkMultipleQueriesSameTransaction | {
"repo_name": "mjdbc/mjdbc",
"path": "src/test/java/com/github/mjdbc/test/SamplesTest.java",
"license": "apache-2.0",
"size": 7101
} | [
"com.github.mjdbc.test.asset.model.User"
] | import com.github.mjdbc.test.asset.model.User; | import com.github.mjdbc.test.asset.model.*; | [
"com.github.mjdbc"
] | com.github.mjdbc; | 2,227,333 |
private void useDeclaredParameters(final int arity, ListBuffer<JCStatement> stmts,
ListBuffer<JCExpression> args) {
for (int a= 0; a < numParams; a++) {
makeParameterArgument(arity, stmts, args, a);
}
}
/**
* Constructs an argument list for the target method as follows:
* <ol>
* <li>uses the declared parameters {@code $param$0}, {@code $param$1}, ...,
* <li>then pulls further arguments from the sequential parameter
* using {@code $param$n.get()} | void function(final int arity, ListBuffer<JCStatement> stmts, ListBuffer<JCExpression> args) { for (int a= 0; a < numParams; a++) { makeParameterArgument(arity, stmts, args, a); } } /** * Constructs an argument list for the target method as follows: * <ol> * <li>uses the declared parameters {@code $param$0}, {@code $param$1}, ..., * <li>then pulls further arguments from the sequential parameter * using {@code $param$n.get()} | /**
* Constructs an argument list for the target method as follows:
* <ol>
* <li>uses the declared parameters {@code $param$0}, {@code $param$1}, ...,
* </ol>
*/ | Constructs an argument list for the target method as follows: uses the declared parameters $param$0, $param$1, ..., | useDeclaredParameters | {
"repo_name": "ceylon/ceylon",
"path": "compiler-java/src/org/eclipse/ceylon/compiler/java/codegen/CallableBuilder.java",
"license": "apache-2.0",
"size": 97136
} | [
"org.eclipse.ceylon.langtools.tools.javac.tree.JCTree",
"org.eclipse.ceylon.langtools.tools.javac.util.ListBuffer"
] | import org.eclipse.ceylon.langtools.tools.javac.tree.JCTree; import org.eclipse.ceylon.langtools.tools.javac.util.ListBuffer; | import org.eclipse.ceylon.langtools.tools.javac.tree.*; import org.eclipse.ceylon.langtools.tools.javac.util.*; | [
"org.eclipse.ceylon"
] | org.eclipse.ceylon; | 779,069 |
@Override
public DataStore open(final StorageConnector connector) throws DataStoreException {
return new SQLStore(this, connector, ResourceDefinition.table(WILDCARD));
} | DataStore function(final StorageConnector connector) throws DataStoreException { return new SQLStore(this, connector, ResourceDefinition.table(WILDCARD)); } | /**
* Returns a {@link SQLStore} implementation associated with this provider.
* The store will provide resources for all tables and views in all schemas and catalogs.
*
* @param connector information about the storage (data source).
* @return a data store implementation associated with this provider for the given storage.
* @throws DataStoreException if an error occurred while creating the data store instance.
*/ | Returns a <code>SQLStore</code> implementation associated with this provider. The store will provide resources for all tables and views in all schemas and catalogs | open | {
"repo_name": "apache/sis",
"path": "storage/sis-sqlstore/src/main/java/org/apache/sis/storage/sql/SQLStoreProvider.java",
"license": "apache-2.0",
"size": 9969
} | [
"org.apache.sis.storage.DataStore",
"org.apache.sis.storage.DataStoreException",
"org.apache.sis.storage.StorageConnector"
] | import org.apache.sis.storage.DataStore; import org.apache.sis.storage.DataStoreException; import org.apache.sis.storage.StorageConnector; | import org.apache.sis.storage.*; | [
"org.apache.sis"
] | org.apache.sis; | 1,508,236 |
public ItemStack changeItem(ItemStack stack, String name) {
ItemMeta meta = stack.getItemMeta();
if (name != null)
meta.setDisplayName(ChatColor.RED + name);
stack.setItemMeta(meta);
return stack;
// For statement documentation, check the above function.
} | ItemStack function(ItemStack stack, String name) { ItemMeta meta = stack.getItemMeta(); if (name != null) meta.setDisplayName(ChatColor.RED + name); stack.setItemMeta(meta); return stack; } | /**
* ORIGINAL METHOD.
* Modifies an ItemStack's ItemMeta.
* Use this method if you do not want to apply lore.
*
* @param stack The ItemStack to change.
* @param name The ItemStack's name.
* <p>
* Currently not used, but may used in the future?
* (-> no maps I added currently use this function)
*/ | ORIGINAL METHOD. Modifies an ItemStack's ItemMeta. Use this method if you do not want to apply lore | changeItem | {
"repo_name": "outspier/War-Framework",
"path": "src/au/edu/swin/war/framework/util/modules/ItemUtility.java",
"license": "mit",
"size": 10424
} | [
"org.bukkit.ChatColor",
"org.bukkit.inventory.ItemStack",
"org.bukkit.inventory.meta.ItemMeta"
] | import org.bukkit.ChatColor; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; | import org.bukkit.*; import org.bukkit.inventory.*; import org.bukkit.inventory.meta.*; | [
"org.bukkit",
"org.bukkit.inventory"
] | org.bukkit; org.bukkit.inventory; | 2,595,671 |
@Override
public TypedScope createScope(Node root, AbstractScope<?, ?> parent) {
checkArgument(parent == null || parent instanceof TypedScope);
TypedScope typedParent = (TypedScope) parent;
TypedScope scope = memoized.get(root);
if (scope != null) {
checkState(typedParent == scope.getParent());
} else {
scope = createScopeInternal(root, typedParent);
memoized.put(root, scope);
}
return scope;
} | TypedScope function(Node root, AbstractScope<?, ?> parent) { checkArgument(parent == null parent instanceof TypedScope); TypedScope typedParent = (TypedScope) parent; TypedScope scope = memoized.get(root); if (scope != null) { checkState(typedParent == scope.getParent()); } else { scope = createScopeInternal(root, typedParent); memoized.put(root, scope); } return scope; } | /**
* Creates a scope with all types declared. Declares newly discovered types
* and type properties in the type registry.
*/ | Creates a scope with all types declared. Declares newly discovered types and type properties in the type registry | createScope | {
"repo_name": "tdelmas/closure-compiler",
"path": "src/com/google/javascript/jscomp/TypedScopeCreator.java",
"license": "apache-2.0",
"size": 87731
} | [
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.Node"
] | import com.google.common.base.Preconditions; import com.google.javascript.rhino.Node; | import com.google.common.base.*; import com.google.javascript.rhino.*; | [
"com.google.common",
"com.google.javascript"
] | com.google.common; com.google.javascript; | 305,018 |
void perform(CcId ccId, IControllerService cs) throws HyracksDataException; | void perform(CcId ccId, IControllerService cs) throws HyracksDataException; | /**
* Performs the task.
*
* @param ccId
* @param cs
* @throws HyracksDataException
*/ | Performs the task | perform | {
"repo_name": "apache/incubator-asterixdb",
"path": "asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INCLifecycleTask.java",
"license": "apache-2.0",
"size": 1326
} | [
"org.apache.hyracks.api.control.CcId",
"org.apache.hyracks.api.exceptions.HyracksDataException",
"org.apache.hyracks.api.service.IControllerService"
] | import org.apache.hyracks.api.control.CcId; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.service.IControllerService; | import org.apache.hyracks.api.control.*; import org.apache.hyracks.api.exceptions.*; import org.apache.hyracks.api.service.*; | [
"org.apache.hyracks"
] | org.apache.hyracks; | 1,052,697 |
@Test
public void containersShouldHavePersistentSessionData() throws IOException, URISyntaxException {
manager.startAllInactiveContainers();
String key = "value_testSessionPersists";
String value = "Foo";
client.setPort(Integer.parseInt(manager.getContainerPort(0)));
Client.Response resp = client.set(key, value);
getKeyValueDataOnAllClients(key, value, resp.getSessionCookie());
} | void function() throws IOException, URISyntaxException { manager.startAllInactiveContainers(); String key = STR; String value = "Foo"; client.setPort(Integer.parseInt(manager.getContainerPort(0))); Client.Response resp = client.set(key, value); getKeyValueDataOnAllClients(key, value, resp.getSessionCookie()); } | /**
* Test that when a session attribute is set in one container, it is replicated to other
* containers
*/ | Test that when a session attribute is set in one container, it is replicated to other containers | containersShouldHavePersistentSessionData | {
"repo_name": "davebarnes97/geode",
"path": "geode-assembly/src/distributedTest/java/org/apache/geode/session/tests/CargoTestBase.java",
"license": "apache-2.0",
"size": 13497
} | [
"java.io.IOException",
"java.net.URISyntaxException"
] | import java.io.IOException; import java.net.URISyntaxException; | import java.io.*; import java.net.*; | [
"java.io",
"java.net"
] | java.io; java.net; | 264,318 |
private void setupChild(View child, int position, int y, boolean flowDown, int childrenLeft, boolean selected, boolean recycled) {
final boolean isSelected = selected && shouldShowSelector();
final boolean updateChildSelected = isSelected != child.isSelected();
final int mode = mTouchMode;
final boolean isPressed = mode > TOUCH_MODE_DOWN && mode < TOUCH_MODE_SCROLL && mMotionPosition == position;
final boolean updateChildPressed = isPressed != child.isPressed();
final boolean needToMeasure = !recycled || updateChildSelected || child.isLayoutRequested();
// Respect layout params that are already in the view. Otherwise make
// some up...
// noinspection unchecked
PLA_AbsListView.LayoutParams p = (PLA_AbsListView.LayoutParams) child.getLayoutParams();
if (p == null) {
p = new PLA_AbsListView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, 0);
}
p.viewType = mAdapter.getItemViewType(position);
if ((recycled && !p.forceAdd) || (p.recycledHeaderFooter && p.viewType == PLA_AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER)) {
attachViewToParent(child, flowDown ? -1 : 0, p);
} else {
p.forceAdd = false;
if (p.viewType == PLA_AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER) {
p.recycledHeaderFooter = true;
}
addViewInLayout(child, flowDown ? -1 : 0, p, true);
}
if (updateChildSelected) {
child.setSelected(isSelected);
}
if (updateChildPressed) {
child.setPressed(isPressed);
}
if (needToMeasure) {
int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec, mListPadding.left + mListPadding.right, p.width);
int lpHeight = p.height;
int childHeightSpec;
if (lpHeight > 0) {
childHeightSpec = MeasureSpec.makeMeasureSpec(lpHeight, MeasureSpec.EXACTLY);
} else {
childHeightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
}
onMeasureChild(child, position, childWidthSpec, childHeightSpec);
} else {
cleanupLayoutState(child);
}
final int w = child.getMeasuredWidth();
final int h = child.getMeasuredHeight();
final int childTop = flowDown ? y : y - h;
if (needToMeasure) {
final int childRight = childrenLeft + w;
final int childBottom = childTop + h;
onLayoutChild(child, position, childrenLeft, childTop, childRight, childBottom);
} else {
final int offsetLeft = childrenLeft - child.getLeft();
final int offsetTop = childTop - child.getTop();
onOffsetChild(child, position, offsetLeft, offsetTop);
}
if (mCachingStarted && !child.isDrawingCacheEnabled()) {
child.setDrawingCacheEnabled(true);
}
} | void function(View child, int position, int y, boolean flowDown, int childrenLeft, boolean selected, boolean recycled) { final boolean isSelected = selected && shouldShowSelector(); final boolean updateChildSelected = isSelected != child.isSelected(); final int mode = mTouchMode; final boolean isPressed = mode > TOUCH_MODE_DOWN && mode < TOUCH_MODE_SCROLL && mMotionPosition == position; final boolean updateChildPressed = isPressed != child.isPressed(); final boolean needToMeasure = !recycled updateChildSelected child.isLayoutRequested(); PLA_AbsListView.LayoutParams p = (PLA_AbsListView.LayoutParams) child.getLayoutParams(); if (p == null) { p = new PLA_AbsListView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, 0); } p.viewType = mAdapter.getItemViewType(position); if ((recycled && !p.forceAdd) (p.recycledHeaderFooter && p.viewType == PLA_AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER)) { attachViewToParent(child, flowDown ? -1 : 0, p); } else { p.forceAdd = false; if (p.viewType == PLA_AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER) { p.recycledHeaderFooter = true; } addViewInLayout(child, flowDown ? -1 : 0, p, true); } if (updateChildSelected) { child.setSelected(isSelected); } if (updateChildPressed) { child.setPressed(isPressed); } if (needToMeasure) { int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec, mListPadding.left + mListPadding.right, p.width); int lpHeight = p.height; int childHeightSpec; if (lpHeight > 0) { childHeightSpec = MeasureSpec.makeMeasureSpec(lpHeight, MeasureSpec.EXACTLY); } else { childHeightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED); } onMeasureChild(child, position, childWidthSpec, childHeightSpec); } else { cleanupLayoutState(child); } final int w = child.getMeasuredWidth(); final int h = child.getMeasuredHeight(); final int childTop = flowDown ? y : y - h; if (needToMeasure) { final int childRight = childrenLeft + w; final int childBottom = childTop + h; onLayoutChild(child, position, childrenLeft, childTop, childRight, childBottom); } else { final int offsetLeft = childrenLeft - child.getLeft(); final int offsetTop = childTop - child.getTop(); onOffsetChild(child, position, offsetLeft, offsetTop); } if (mCachingStarted && !child.isDrawingCacheEnabled()) { child.setDrawingCacheEnabled(true); } } | /**
* Add a view as a child and make sure it is measured (if necessary) and
* positioned properly.
*
* @param child
* The view to add
* @param position
* The position of this child
* @param y
* The y position relative to which this view will be positioned
* @param flowDown
* If true, align top edge to y. If false, align bottom edge to
* y.
* @param childrenLeft
* Left edge where children should be positioned
* @param selected
* Is this position selected?
* @param recycled
* Has this view been pulled from the recycle bin? If so it does
* not need to be remeasured.
*/ | Add a view as a child and make sure it is measured (if necessary) and positioned properly | setupChild | {
"repo_name": "longwei243/viewlibrary",
"path": "ViewLibrary/src/org/longwei/view/pinterestlike/PLA_ListView.java",
"license": "apache-2.0",
"size": 76727
} | [
"android.view.View",
"android.view.ViewGroup"
] | import android.view.View; import android.view.ViewGroup; | import android.view.*; | [
"android.view"
] | android.view; | 485,310 |
public ResponseProjectOperation getResponse()
{
Throwable cause = getCause();
if (null == cause)
return ResponseProjectFactory.constructResponse(code, getMessage(), "");
else
return ResponseProjectFactory.constructResponse(code, getMessage(), cause.getMessage());
} | ResponseProjectOperation function() { Throwable cause = getCause(); if (null == cause) return ResponseProjectFactory.constructResponse(code, getMessage(), ""); else return ResponseProjectFactory.constructResponse(code, getMessage(), cause.getMessage()); } | /**
* Returns ResponseProjectOperation containing code, message and cause key-value pairs used in REST responses
*
* @return ResponseProjectOperation containing code, message and cause key-value pairs used in REST responses
*/ | Returns ResponseProjectOperation containing code, message and cause key-value pairs used in REST responses | getResponse | {
"repo_name": "dkoudela/crucible-project-admin-restapi",
"path": "src/main/java/com/davidkoudela/crucible/exceptions/ProjectAdminException.java",
"license": "gpl-2.0",
"size": 1318
} | [
"com.davidkoudela.crucible.rest.response.ResponseProjectFactory",
"com.davidkoudela.crucible.rest.response.ResponseProjectOperation"
] | import com.davidkoudela.crucible.rest.response.ResponseProjectFactory; import com.davidkoudela.crucible.rest.response.ResponseProjectOperation; | import com.davidkoudela.crucible.rest.response.*; | [
"com.davidkoudela.crucible"
] | com.davidkoudela.crucible; | 2,436,730 |
private static void deleteEdgeIfAny(final OIdentifiable iRecord, boolean forceReload) {
if (iRecord != null) {
final ODocument doc = getDocument(iRecord, forceReload);
if (doc != null) {
final OImmutableClass clazz = ODocumentInternal.getImmutableSchemaClass(doc);
if (clazz != null && clazz.isEdgeType())
// DELETE THE EDGE RECORD TOO
doc.delete();
}
}
} | static void function(final OIdentifiable iRecord, boolean forceReload) { if (iRecord != null) { final ODocument doc = getDocument(iRecord, forceReload); if (doc != null) { final OImmutableClass clazz = ODocumentInternal.getImmutableSchemaClass(doc); if (clazz != null && clazz.isEdgeType()) doc.delete(); } } } | /**
* (Internal only)
*/ | (Internal only) | deleteEdgeIfAny | {
"repo_name": "mmacfadden/orientdb",
"path": "graphdb/src/main/java/com/tinkerpop/blueprints/impls/orient/OrientBaseGraph.java",
"license": "apache-2.0",
"size": 69036
} | [
"com.orientechnologies.orient.core.db.record.OIdentifiable",
"com.orientechnologies.orient.core.metadata.schema.OImmutableClass",
"com.orientechnologies.orient.core.record.impl.ODocument",
"com.orientechnologies.orient.core.record.impl.ODocumentInternal"
] | import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.metadata.schema.OImmutableClass; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.record.impl.ODocumentInternal; | import com.orientechnologies.orient.core.db.record.*; import com.orientechnologies.orient.core.metadata.schema.*; import com.orientechnologies.orient.core.record.impl.*; | [
"com.orientechnologies.orient"
] | com.orientechnologies.orient; | 2,357,533 |
@Nullable
public static String encodeText (@Nonnull final BitSet aPrintableBitSet,
@Nullable final String sDecoded,
@Nonnull final Charset aCharset)
{
if (sDecoded == null)
return null;
final byte [] aEncodedData = encodeQuotedPrintable (aPrintableBitSet,
CharsetManager.getAsBytes (sDecoded, aCharset));
return CharsetManager.getAsString (aEncodedData, CCharset.CHARSET_US_ASCII_OBJ);
} | static String function (@Nonnull final BitSet aPrintableBitSet, @Nullable final String sDecoded, @Nonnull final Charset aCharset) { if (sDecoded == null) return null; final byte [] aEncodedData = encodeQuotedPrintable (aPrintableBitSet, CharsetManager.getAsBytes (sDecoded, aCharset)); return CharsetManager.getAsString (aEncodedData, CCharset.CHARSET_US_ASCII_OBJ); } | /**
* Encode the passed text using a custom BitSet
*
* @param aPrintableBitSet
* The BitSet with all chars to <b>NOT</b> escape. May not be
* <code>null</code>.
* @param sDecoded
* The original string to be encoded. May be <code>null</code>.
* @param aCharset
* The charset to be used. May not be <code>null</code>.
* @return The encoded string in US-ASCII encoding. May be <code>null</code>
* if the original string is <code>null</code>.
*/ | Encode the passed text using a custom BitSet | encodeText | {
"repo_name": "lsimons/phloc-schematron-standalone",
"path": "phloc-commons/src/main/java/com/phloc/commons/codec/QuotedPrintableCodec.java",
"license": "apache-2.0",
"size": 7579
} | [
"com.phloc.commons.charset.CCharset",
"com.phloc.commons.charset.CharsetManager",
"java.nio.charset.Charset",
"java.util.BitSet",
"javax.annotation.Nonnull",
"javax.annotation.Nullable"
] | import com.phloc.commons.charset.CCharset; import com.phloc.commons.charset.CharsetManager; import java.nio.charset.Charset; import java.util.BitSet; import javax.annotation.Nonnull; import javax.annotation.Nullable; | import com.phloc.commons.charset.*; import java.nio.charset.*; import java.util.*; import javax.annotation.*; | [
"com.phloc.commons",
"java.nio",
"java.util",
"javax.annotation"
] | com.phloc.commons; java.nio; java.util; javax.annotation; | 818,535 |
public static beta2Type fromPerUnaligned(byte[] encodedBytes) {
beta2Type result = new beta2Type();
result.decodePerUnaligned(new BitStreamReader(encodedBytes));
return result;
} | static beta2Type function(byte[] encodedBytes) { beta2Type result = new beta2Type(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } | /**
* Creates a new beta2Type from encoded stream.
*/ | Creates a new beta2Type from encoded stream | fromPerUnaligned | {
"repo_name": "google/supl-client",
"path": "src/main/java/com/google/location/suplclient/asn1/supl2/rrlp_components/IonosphericModel.java",
"license": "apache-2.0",
"size": 32842
} | [
"com.google.location.suplclient.asn1.base.BitStreamReader"
] | import com.google.location.suplclient.asn1.base.BitStreamReader; | import com.google.location.suplclient.asn1.base.*; | [
"com.google.location"
] | com.google.location; | 2,015,638 |
public Iterable<NewsgroupInfo> iterateNewsgroups(String wildmat) throws IOException {
return new NewsgroupIterator(iterateNewsgroupListing(wildmat));
} | Iterable<NewsgroupInfo> function(String wildmat) throws IOException { return new NewsgroupIterator(iterateNewsgroupListing(wildmat)); } | /**
* List the newsgroups that match a given pattern.
* Uses the "LIST ACTIVE" command.
* <p>
* @param wildmat a pseudo-regex pattern (cf. RFC 2980)
* @return An iterable NewsgroupInfo instances containing the information
* for each newsgroup served by the NNTP server corresponding to the
* supplied pattern. If no such newsgroups are served, no entries
* will be returned.
* @throws IOException on error
* @since 3.0
*/ | List the newsgroups that match a given pattern. Uses the "LIST ACTIVE" command. | iterateNewsgroups | {
"repo_name": "codolutions/commons-net",
"path": "src/main/java/org/apache/commons/net/nntp/NNTPClient.java",
"license": "apache-2.0",
"size": 77301
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,609,090 |
public static String calculateTreeHash(List<byte[]> checksums) throws AmazonClientException {
List<byte[]> hashes = new ArrayList<byte[]>();
hashes.addAll(checksums);
while ( hashes.size() > 1 ) {
List<byte[]> treeHashes = new ArrayList<byte[]>();
for ( int i = 0; i < hashes.size() / 2; i++ ) {
byte[] firstPart = hashes.get(2 * i);
byte[] secondPart = hashes.get(2 * i + 1);
byte[] concatenation = new byte[firstPart.length + secondPart.length];
System.arraycopy(firstPart, 0, concatenation, 0, firstPart.length);
System.arraycopy(secondPart, 0, concatenation, firstPart.length, secondPart.length);
try {
treeHashes.add(computeSHA256Hash(concatenation));
} catch (Exception e) {
throw new AmazonClientException("Unable to compute hash", e);
}
}
if ( hashes.size() % 2 == 1 ) {
treeHashes.add(hashes.get(hashes.size() - 1));
}
hashes = treeHashes;
}
return BinaryUtils.toHex(hashes.get(0));
}
| static String function(List<byte[]> checksums) throws AmazonClientException { List<byte[]> hashes = new ArrayList<byte[]>(); hashes.addAll(checksums); while ( hashes.size() > 1 ) { List<byte[]> treeHashes = new ArrayList<byte[]>(); for ( int i = 0; i < hashes.size() / 2; i++ ) { byte[] firstPart = hashes.get(2 * i); byte[] secondPart = hashes.get(2 * i + 1); byte[] concatenation = new byte[firstPart.length + secondPart.length]; System.arraycopy(firstPart, 0, concatenation, 0, firstPart.length); System.arraycopy(secondPart, 0, concatenation, firstPart.length, secondPart.length); try { treeHashes.add(computeSHA256Hash(concatenation)); } catch (Exception e) { throw new AmazonClientException(STR, e); } } if ( hashes.size() % 2 == 1 ) { treeHashes.add(hashes.get(hashes.size() - 1)); } hashes = treeHashes; } return BinaryUtils.toHex(hashes.get(0)); } | /**
* Returns the hex encoded binary tree hash for the individual checksums
* given. The sums are assumed to have been generated from sequential 1MB
* portions of a larger file, with the possible exception of the last part,
* which may be less than a full MB.
*
* @return The combined hex encoded binary tree hash for the individual
* checksums specified.
*
* @throws AmazonClientException
* If problems were encountered reading the data or calculating
* the hash.
*/ | Returns the hex encoded binary tree hash for the individual checksums given. The sums are assumed to have been generated from sequential 1MB portions of a larger file, with the possible exception of the last part, which may be less than a full MB | calculateTreeHash | {
"repo_name": "jentfoo/aws-sdk-java",
"path": "aws-java-sdk-glacier/src/main/java/com/amazonaws/services/glacier/TreeHashGenerator.java",
"license": "apache-2.0",
"size": 6290
} | [
"com.amazonaws.AmazonClientException",
"com.amazonaws.util.BinaryUtils",
"java.util.ArrayList",
"java.util.List"
] | import com.amazonaws.AmazonClientException; import com.amazonaws.util.BinaryUtils; import java.util.ArrayList; import java.util.List; | import com.amazonaws.*; import com.amazonaws.util.*; import java.util.*; | [
"com.amazonaws",
"com.amazonaws.util",
"java.util"
] | com.amazonaws; com.amazonaws.util; java.util; | 722,611 |