method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
public static byte[] packByteBufferIntoSingleByteArray(byte[][] byteBufferArr) {
// for empty array means there is no data to remove dictionary.
if (null == byteBufferArr || byteBufferArr.length == 0) {
return null;
}
int noOfCol = byteBufferArr.length;
short toDetermineLengthOfByteArr = 2;
short offsetLen = (short) (noOfCol * 2 + toDetermineLengthOfByteArr);
int totalBytes = calculateTotalBytes(byteBufferArr) + offsetLen;
ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
// write the length of the byte [] as first short
buffer.putShort((short) (totalBytes - toDetermineLengthOfByteArr));
// writing the offset of the first element.
buffer.putShort(offsetLen);
// prepare index for byte []
for (int index = 0; index < byteBufferArr.length - 1; index++) {
int noOfBytes = byteBufferArr[index].length;
buffer.putShort((short) (offsetLen + noOfBytes));
offsetLen += noOfBytes;
}
// put actual data.
for (int index = 0; index < byteBufferArr.length; index++) {
buffer.put(byteBufferArr[index]);
}
buffer.rewind();
return buffer.array();
} | static byte[] function(byte[][] byteBufferArr) { if (null == byteBufferArr byteBufferArr.length == 0) { return null; } int noOfCol = byteBufferArr.length; short toDetermineLengthOfByteArr = 2; short offsetLen = (short) (noOfCol * 2 + toDetermineLengthOfByteArr); int totalBytes = calculateTotalBytes(byteBufferArr) + offsetLen; ByteBuffer buffer = ByteBuffer.allocate(totalBytes); buffer.putShort((short) (totalBytes - toDetermineLengthOfByteArr)); buffer.putShort(offsetLen); for (int index = 0; index < byteBufferArr.length - 1; index++) { int noOfBytes = byteBufferArr[index].length; buffer.putShort((short) (offsetLen + noOfBytes)); offsetLen += noOfBytes; } for (int index = 0; index < byteBufferArr.length; index++) { buffer.put(byteBufferArr[index]); } buffer.rewind(); return buffer.array(); } | /**
* This method will form one single byte [] for all the high card dims.
* For example if you need to pack 2 columns c1 and c2 , it stores in following way
* <total_len(short)><offsetLen(short)><offsetLen+c1_len(short)><c1(byte[])><c2(byte[])>
* @param byteBufferArr
* @return
*/ | This method will form one single byte [] for all the high card dims. For example if you need to pack 2 columns c1 and c2 , it stores in following way | packByteBufferIntoSingleByteArray | {
"repo_name": "ashokblend/incubator-carbondata",
"path": "processing/src/main/java/org/apache/carbondata/processing/util/RemoveDictionaryUtil.java",
"license": "apache-2.0",
"size": 15273
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 2,332,450 |
public Builder setSource(Bitmap bitmap) {
Preconditions.checkNotNull(bitmap, "Parameter \"bitmap\" was null.");
if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) {
throw new IllegalArgumentException(
"Invalid Bitmap: Bitmap's configuration must be "
+ "ARGB_8888, but it was "
+ bitmap.getConfig());
}
if (bitmap.hasAlpha() && !bitmap.isPremultiplied()) {
throw new IllegalArgumentException("Invalid Bitmap: Bitmap must be premultiplied.");
}
if (bitmap.getWidth() > MAX_BITMAP_SIZE || bitmap.getHeight() > MAX_BITMAP_SIZE) {
throw new IllegalArgumentException(
"Invalid Bitmap: Bitmap width and height must be "
+ "smaller than 4096. Bitmap was "
+ bitmap.getWidth()
+ " width and "
+ bitmap.getHeight()
+ " height.");
}
this.bitmap = bitmap;
// TODO: don't overwrite calls to setRegistryId
registryId = null;
inputStreamCreator = null;
return this;
} | Builder function(Bitmap bitmap) { Preconditions.checkNotNull(bitmap, STRbitmap\STR); if (bitmap.getConfig() != Bitmap.Config.ARGB_8888) { throw new IllegalArgumentException( STR + STR + bitmap.getConfig()); } if (bitmap.hasAlpha() && !bitmap.isPremultiplied()) { throw new IllegalArgumentException(STR); } if (bitmap.getWidth() > MAX_BITMAP_SIZE bitmap.getHeight() > MAX_BITMAP_SIZE) { throw new IllegalArgumentException( STR + STR + bitmap.getWidth() + STR + bitmap.getHeight() + STR); } this.bitmap = bitmap; registryId = null; inputStreamCreator = null; return this; } | /**
* Allows a {@link Texture} to be constructed from a {@link Bitmap}. Construction will be
* immediate.
*
* <p>The Bitmap must meet the following conditions to be used by Sceneform:
*
* <ul>
* <li>{@link Bitmap#getConfig()} must be {@link Bitmap.Config#ARGB_8888}.
* <li>{@link Bitmap#isPremultiplied()} must be true.
* <li>The width and height must be smaller than 4096 pixels.
* </ul>
*
* @param bitmap {@link Bitmap} source of texture data
* @throws IllegalArgumentException if the bitmap isn't valid
*/ | Allows a <code>Texture</code> to be constructed from a <code>Bitmap</code>. Construction will be immediate. The Bitmap must meet the following conditions to be used by Sceneform: <code>Bitmap#getConfig()</code> must be <code>Bitmap.Config#ARGB_8888</code>. <code>Bitmap#isPremultiplied()</code> must be true. The width and height must be smaller than 4096 pixels. | setSource | {
"repo_name": "google-ar/sceneform-android-sdk",
"path": "sceneformsrc/sceneform/src/main/java/com/google/ar/sceneform/rendering/Texture.java",
"license": "apache-2.0",
"size": 19935
} | [
"android.graphics.Bitmap",
"com.google.ar.sceneform.utilities.Preconditions"
] | import android.graphics.Bitmap; import com.google.ar.sceneform.utilities.Preconditions; | import android.graphics.*; import com.google.ar.sceneform.utilities.*; | [
"android.graphics",
"com.google.ar"
] | android.graphics; com.google.ar; | 214,089 |
public synchronized static void initDB() {
Statement s = null;
try {
openConnection();
s = con.createStatement();
s.execute("CREATE DATABASE IF NOT EXISTS discordbans");
s.execute("USE discordbans");
s.execute("CREATE TABLE IF NOT EXISTS bans (id INT primary key auto_increment, user text NOT NULL, discordID bigint(20) NOT NULL, ban_count int(11) NOT NULL, lastbandate date NOT NULL );");
s.execute("CREATE TABLE IF NOT EXISTS icons (id INT primary key auto_increment, discordID BIGINT NOT NULL, favicon TEXT NOT NULL);");
closeConnection();
} catch(SQLException ex) {
ex.printStackTrace();
System.exit(-1);
}
} | synchronized static void function() { Statement s = null; try { openConnection(); s = con.createStatement(); s.execute(STR); s.execute(STR); s.execute(STR); s.execute(STR); closeConnection(); } catch(SQLException ex) { ex.printStackTrace(); System.exit(-1); } } | /**
* If this is the first time DiscordBans-Bot has been
* started, it will attempt to create the needed tables
* in a database named discordbans
* @throws SQLException - The MySQL platform does not exist/is not online.
*/ | If this is the first time DiscordBans-Bot has been started, it will attempt to create the needed tables in a database named discordbans | initDB | {
"repo_name": "SSederberg/DiscordBans-Bot",
"path": "src/me/spencersederberg/discordbansbot/BanAPI.java",
"license": "apache-2.0",
"size": 10138
} | [
"java.sql.SQLException",
"java.sql.Statement"
] | import java.sql.SQLException; import java.sql.Statement; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,658,268 |
public StringProperty getSignalIcon() {
return this.signalIcon;
}
| StringProperty function() { return this.signalIcon; } | /**
* The Workflow Signal icon.
*
* @return the StringProperty.
*/ | The Workflow Signal icon | getSignalIcon | {
"repo_name": "NABUCCO/org.nabucco.framework.base",
"path": "org.nabucco.framework.base.facade.datatype/src/main/gen/org/nabucco/framework/base/facade/datatype/extension/schema/ui/work/WorkItemWorkflowExtension.java",
"license": "epl-1.0",
"size": 14079
} | [
"org.nabucco.framework.base.facade.datatype.extension.property.StringProperty"
] | import org.nabucco.framework.base.facade.datatype.extension.property.StringProperty; | import org.nabucco.framework.base.facade.datatype.extension.property.*; | [
"org.nabucco.framework"
] | org.nabucco.framework; | 1,034,675 |
private Result finishTail() throws IgniteCheckedException {
assert !isFinished();
assert tail.type == Tail.EXACT && tail.lvl >= 0: tail;
if (tail.lvl == 0) {
// At the bottom level we can't have a tail without a sibling, it means we have higher levels.
assert tail.sibling != null : tail;
return NOT_FOUND; // Lock upper level, we are at the bottom now.
}
else {
// We may lock wrong triangle because of concurrent operations.
if (!validateTail()) {
if (releaseForRetry(tail))
return RETRY;
// It was a regular merge, leave as is and exit.
}
else {
// Try to find inner key on inner level.
if (needReplaceInner == TRUE) {
// Since we setup needReplaceInner in leaf page write lock and do not release it,
// we should not be able to miss the inner key. Even if concurrent merge
// happened the inner key must still exist.
if (!isInnerKeyInTail())
return NOT_FOUND; // Lock the whole branch up to the inner key.
needReplaceInner = READY;
}
// Try to merge an empty branch.
if (needMergeEmptyBranch == TRUE) {
// We can't merge empty branch if tail is a routing page.
if (tail.getCount() == 0)
return NOT_FOUND; // Lock the whole branch up to the first non-empty.
// Top-down merge for empty branch. The actual row remove will happen here if everything is ok.
Tail<L> t = mergeEmptyBranch();
if (t != null) {
// We were not able to merge empty branch, need to release and retry.
boolean ok = releaseForRetry(t);
assert ok; // Here we must always retry because it is not a regular merge.
return RETRY;
}
needMergeEmptyBranch = DONE;
}
// The actual row remove may happen here as well.
mergeBottomUp(tail);
if (needReplaceInner == READY) {
replaceInner(); // Replace inner key with new max key for the left subtree.
needReplaceInner = DONE;
}
assert needReplaceInner != TRUE;
if (tail.getCount() == 0 && tail.lvl != 0 && getRootLevel() == tail.lvl) {
// Free root if it became empty after merge.
cutRoot(tail.lvl);
freePage(tail.pageId, tail.page, tail.buf, tail.walPlc, false);
// Exit: we are done.
}
else if (tail.sibling != null &&
tail.getCount() + tail.sibling.getCount() < tail.io.getMaxCount(tail.buf, pageSize())) {
// Release everything lower than tail, we've already merged this path.
doReleaseTail(tail.down);
tail.down = null;
return NOT_FOUND; // Lock and merge one level more.
}
// We don't want to merge anything more, exiting.
}
}
// If we've found nothing in the tree, we should not do any modifications or take tail locks.
assert isRemoved();
releaseTail();
finish();
return FOUND;
} | Result function() throws IgniteCheckedException { assert !isFinished(); assert tail.type == Tail.EXACT && tail.lvl >= 0: tail; if (tail.lvl == 0) { assert tail.sibling != null : tail; return NOT_FOUND; } else { if (!validateTail()) { if (releaseForRetry(tail)) return RETRY; } else { if (needReplaceInner == TRUE) { if (!isInnerKeyInTail()) return NOT_FOUND; needReplaceInner = READY; } if (needMergeEmptyBranch == TRUE) { if (tail.getCount() == 0) return NOT_FOUND; Tail<L> t = mergeEmptyBranch(); if (t != null) { boolean ok = releaseForRetry(t); assert ok; return RETRY; } needMergeEmptyBranch = DONE; } mergeBottomUp(tail); if (needReplaceInner == READY) { replaceInner(); needReplaceInner = DONE; } assert needReplaceInner != TRUE; if (tail.getCount() == 0 && tail.lvl != 0 && getRootLevel() == tail.lvl) { cutRoot(tail.lvl); freePage(tail.pageId, tail.page, tail.buf, tail.walPlc, false); } else if (tail.sibling != null && tail.getCount() + tail.sibling.getCount() < tail.io.getMaxCount(tail.buf, pageSize())) { doReleaseTail(tail.down); tail.down = null; return NOT_FOUND; } } } assert isRemoved(); releaseTail(); finish(); return FOUND; } | /**
* Process tail and finish.
*
* @return Result.
* @throws IgniteCheckedException If failed.
*/ | Process tail and finish | finishTail | {
"repo_name": "sk0x50/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/tree/BPlusTree.java",
"license": "apache-2.0",
"size": 160683
} | [
"org.apache.ignite.IgniteCheckedException"
] | import org.apache.ignite.IgniteCheckedException; | import org.apache.ignite.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,947,690 |
// <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
| void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } | /**
* Handles the HTTP <code>GET</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Handles the HTTP <code>GET</code> method | doGet | {
"repo_name": "Navneet-Bedi/BookStoreApplication",
"path": "src/CheckoutServlet.java",
"license": "gpl-3.0",
"size": 4857
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 451,857 |
EntityRecord getSingleTrashRecord(Long entityId, Long instanceId);
/**
* Retrieves value of a single field of an instance. Throws {@link org.motechproject.mds.ex.entity.EntityNotFoundException} | EntityRecord getSingleTrashRecord(Long entityId, Long instanceId); /** * Retrieves value of a single field of an instance. Throws {@link org.motechproject.mds.ex.entity.EntityNotFoundException} | /**
* Allows to retrieve a single instance, that has been moved to trash.
*
* @param entityId id of the entity
* @param instanceId id of the instance in trash
* @return instance representation
*/ | Allows to retrieve a single instance, that has been moved to trash | getSingleTrashRecord | {
"repo_name": "justin-hayes/motech",
"path": "platform/mds/mds-web/src/main/java/org/motechproject/mds/web/service/InstanceService.java",
"license": "bsd-3-clause",
"size": 14144
} | [
"org.motechproject.mds.web.domain.EntityRecord"
] | import org.motechproject.mds.web.domain.EntityRecord; | import org.motechproject.mds.web.domain.*; | [
"org.motechproject.mds"
] | org.motechproject.mds; | 81,987 |
public ContainerShellWrapper loadInstance(Job job)
throws ContainerShellWrapperFactoryException {
Container c = job.getContainer();
String shortName = null;
if (c == null) {
shortName = ContainerShellWrapperFactory.NO_SHELL_WRAPPER_CLASS;
} else {
Container.TYPE type = c.getType();
if (c.getType().equals(Container.TYPE.docker)) {
shortName = ContainerShellWrapperFactory.DOCKER_SHELL_WRAPPER_CLASS;
} else if (c.getType().equals(Container.TYPE.singularity)) {
shortName = ContainerShellWrapperFactory.SINGULARITY_SHELL_WRAPPER_CLASS;
} else if (c.getType().equals(Container.TYPE.shifter)) {
shortName = ContainerShellWrapperFactory.SHIFTER_SHELL_WRAPPER_CLASS;
} else {
throw new ContainerShellWrapperFactoryException(
"Unsupported Container Shell Wrapper of type " + type);
}
}
return loadInstance(mBag, mDAG, shortName);
} | ContainerShellWrapper function(Job job) throws ContainerShellWrapperFactoryException { Container c = job.getContainer(); String shortName = null; if (c == null) { shortName = ContainerShellWrapperFactory.NO_SHELL_WRAPPER_CLASS; } else { Container.TYPE type = c.getType(); if (c.getType().equals(Container.TYPE.docker)) { shortName = ContainerShellWrapperFactory.DOCKER_SHELL_WRAPPER_CLASS; } else if (c.getType().equals(Container.TYPE.singularity)) { shortName = ContainerShellWrapperFactory.SINGULARITY_SHELL_WRAPPER_CLASS; } else if (c.getType().equals(Container.TYPE.shifter)) { shortName = ContainerShellWrapperFactory.SHIFTER_SHELL_WRAPPER_CLASS; } else { throw new ContainerShellWrapperFactoryException( STR + type); } } return loadInstance(mBag, mDAG, shortName); } | /**
* This method loads the appropriate implementing Container Shell Wrapper as specified by the
* user at runtime.
*
* @param job the job for which wrapper is requried.
* @return the instance of the class implementing this interface.
* @exception ContainerShellWrapperFactoryException that nests any error that might occur during
* the instantiation of the implementation.
* @see #DEFAULT_PACKAGE_NAME
*/ | This method loads the appropriate implementing Container Shell Wrapper as specified by the user at runtime | loadInstance | {
"repo_name": "pegasus-isi/pegasus",
"path": "src/edu/isi/pegasus/planner/code/gridstart/container/ContainerShellWrapperFactory.java",
"license": "apache-2.0",
"size": 7325
} | [
"edu.isi.pegasus.planner.catalog.transformation.classes.Container",
"edu.isi.pegasus.planner.classes.Job"
] | import edu.isi.pegasus.planner.catalog.transformation.classes.Container; import edu.isi.pegasus.planner.classes.Job; | import edu.isi.pegasus.planner.catalog.transformation.classes.*; import edu.isi.pegasus.planner.classes.*; | [
"edu.isi.pegasus"
] | edu.isi.pegasus; | 1,002,870 |
public Learning create(final String architecture, final int input,
final int output) {
if (input <= 0) {
throw new RuntimeException("Must have at least one input for EPL.");
}
if (output <= 0) {
throw new RuntimeException("Must have at least one output for EPL.");
}
final Map<String, String> args = ArchitectureParse
.parseParams(architecture);
final ParamsHolder holder = new ParamsHolder(args);
final int populationSize = holder.getInt(
MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000);
final String variables = holder.getString("vars", false, "x");
final String funct = holder.getString("funct", false, null);
final EncogProgramContext context = new EncogProgramContext();
final StringTokenizer tok = new StringTokenizer(variables, ",");
while (tok.hasMoreElements()) {
context.defineVariable(tok.nextToken());
}
if ("numeric".equalsIgnoreCase(funct)) {
StandardExtensions.createNumericOperators(context);
}
final PrgPopulation pop = new PrgPopulation(context, populationSize);
if (context.getFunctions().size() > 0) {
(new RampedHalfAndHalf(context, 2, 6)).generate(new Random(), pop);
}
return pop;
} | Learning function(final String architecture, final int input, final int output) { if (input <= 0) { throw new RuntimeException(STR); } if (output <= 0) { throw new RuntimeException(STR); } final Map<String, String> args = ArchitectureParse .parseParams(architecture); final ParamsHolder holder = new ParamsHolder(args); final int populationSize = holder.getInt( MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000); final String variables = holder.getString("vars", false, "x"); final String funct = holder.getString("funct", false, null); final EncogProgramContext context = new EncogProgramContext(); final StringTokenizer tok = new StringTokenizer(variables, ","); while (tok.hasMoreElements()) { context.defineVariable(tok.nextToken()); } if (STR.equalsIgnoreCase(funct)) { StandardExtensions.createNumericOperators(context); } final PrgPopulation pop = new PrgPopulation(context, populationSize); if (context.getFunctions().size() > 0) { (new RampedHalfAndHalf(context, 2, 6)).generate(new Random(), pop); } return pop; } | /**
* Create a feed forward network.
*
* @param architecture The architecture string to use.
* @param input The input count.
* @param output The output count.
* @return The feedforward network.
*/ | Create a feed forward network | create | {
"repo_name": "automenta/java_dann",
"path": "src/syncleus/dann/util/factory/method/EPLFactory.java",
"license": "agpl-3.0",
"size": 2812
} | [
"java.util.Map",
"java.util.Random",
"java.util.StringTokenizer"
] | import java.util.Map; import java.util.Random; import java.util.StringTokenizer; | import java.util.*; | [
"java.util"
] | java.util; | 1,929,085 |
public static ims.core.vo.PatientDiagnosisListVo insert(DomainObjectMap map, ims.core.vo.PatientDiagnosisListVo valueObject, ims.core.clinical.domain.objects.PatientDiagnosis domainObject)
{
if (null == domainObject)
{
return valueObject;
}
if (null == map)
{
map = new DomainObjectMap();
}
valueObject.setID_PatientDiagnosis(domainObject.getId());
valueObject.setIsRIE(domainObject.getIsRIE());
// If this is a recordedInError record, and the domainObject
// value isIncludeRecord has not been set, then we return null and
// not the value object
if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord())
return null;
// If this is not a recordedInError record, and the domainObject
// value isIncludeRecord has been set, then we return null and
// not the value object
if ((valueObject.getIsRIE() == null || valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord())
return null;
// PrimaryForCareSpells
valueObject.setPrimaryForCareSpells(ims.core.vo.domain.CSPrimaryDiagnosisVoAssembler.createCSPrimaryDiagnosisVoCollectionFromCsPrimaryDiagnosis(map, domainObject.getPrimaryForCareSpells()) );
// isComplication
valueObject.setIsComplication( domainObject.isIsComplication() );
// DateResolved
java.util.Date DateResolved = domainObject.getDateResolved();
if ( null != DateResolved )
{
valueObject.setDateResolved(new ims.framework.utils.Date(DateResolved) );
}
// SysInfo
// set system information
valueObject.setSysInfo(ims.vo.domain.SystemInformationAssembler.create(domainObject.getSystemInformation()));
// DateOnset
java.util.Date DateOnset = domainObject.getDateOnset();
if ( null != DateOnset )
{
valueObject.setDateOnset(new ims.framework.utils.Date(DateOnset) );
}
// ClinicalContact
valueObject.setClinicalContact(ims.core.vo.domain.ClinicalContactShortVoAssembler.create(map, domainObject.getClinicalContact()) );
// DiagLaterality
ims.domain.lookups.LookupInstance instance7 = domainObject.getDiagLaterality();
if ( null != instance7 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance7.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance7.getImage().getImageId(), instance7.getImage().getImagePath());
}
color = instance7.getColor();
if (color != null)
color.getValue();
ims.core.vo.lookups.LateralityLRB voLookup7 = new ims.core.vo.lookups.LateralityLRB(instance7.getId(),instance7.getText(), instance7.isActive(), null, img, color);
ims.core.vo.lookups.LateralityLRB parentVoLookup7 = voLookup7;
ims.domain.lookups.LookupInstance parent7 = instance7.getParent();
while (parent7 != null)
{
if (parent7.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent7.getImage().getImageId(), parent7.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent7.getColor();
if (color != null)
color.getValue();
parentVoLookup7.setParent(new ims.core.vo.lookups.LateralityLRB(parent7.getId(),parent7.getText(), parent7.isActive(), null, img, color));
parentVoLookup7 = parentVoLookup7.getParent();
parent7 = parent7.getParent();
}
valueObject.setDiagLaterality(voLookup7);
}
// SiteText
valueObject.setSiteText(domainObject.getSiteText());
// isPMH
valueObject.setIsPMH( domainObject.isIsPMH() );
// isCancerDiagnosis
valueObject.setIsCancerDiagnosis( domainObject.isIsCancerDiagnosis() );
// AuthoringInfo
valueObject.setAuthoringInfo(ims.core.vo.domain.AuthoringInformationVoAssembler.create(map, domainObject.getAuthoringInfo()) );
// DiagnosisDescription
valueObject.setDiagnosisDescription(domainObject.getDiagnosisDescription());
// DiagnosedDate
Integer DiagnosedDate = domainObject.getDiagnosedDate();
if ( null != DiagnosedDate )
{
valueObject.setDiagnosedDate(new ims.framework.utils.PartialDate(DiagnosedDate) );
}
// CurrentStatus
valueObject.setCurrentStatus(ims.clinical.vo.domain.PatientDiagnosisStatusVoAssembler.create(map, domainObject.getCurrentStatus()) );
// SourceofInformation
ims.domain.lookups.LookupInstance instance15 = domainObject.getSourceofInformation();
if ( null != instance15 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance15.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance15.getImage().getImageId(), instance15.getImage().getImagePath());
}
color = instance15.getColor();
if (color != null)
color.getValue();
ims.core.vo.lookups.SourceofInformation voLookup15 = new ims.core.vo.lookups.SourceofInformation(instance15.getId(),instance15.getText(), instance15.isActive(), null, img, color);
ims.core.vo.lookups.SourceofInformation parentVoLookup15 = voLookup15;
ims.domain.lookups.LookupInstance parent15 = instance15.getParent();
while (parent15 != null)
{
if (parent15.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent15.getImage().getImageId(), parent15.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent15.getColor();
if (color != null)
color.getValue();
parentVoLookup15.setParent(new ims.core.vo.lookups.SourceofInformation(parent15.getId(),parent15.getText(), parent15.isActive(), null, img, color));
parentVoLookup15 = parentVoLookup15.getParent();
parent15 = parent15.getParent();
}
valueObject.setSourceofInformation(voLookup15);
}
// Diagnosis
valueObject.setDiagnosis(ims.core.vo.domain.DiagLiteVoAssembler.create(map, domainObject.getDiagnosis()) );
// CareContext
valueObject.setCareContext(ims.core.vo.domain.CareContextLiteVoAssembler.create(map, domainObject.getCareContext()) );
// isComorbidity
valueObject.setIsComorbidity( domainObject.isIsComorbidity() );
// EpisodeOfCare
if (domainObject.getEpisodeOfCare() != null)
{
if(domainObject.getEpisodeOfCare() instanceof HibernateProxy) // If the proxy is set, there is no need to lazy load, the proxy knows the id already.
{
HibernateProxy p = (HibernateProxy) domainObject.getEpisodeOfCare();
int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString());
valueObject.setEpisodeOfCare(new ims.core.admin.vo.EpisodeOfCareRefVo(id, -1));
}
else
{
valueObject.setEpisodeOfCare(new ims.core.admin.vo.EpisodeOfCareRefVo(domainObject.getEpisodeOfCare().getId(), domainObject.getEpisodeOfCare().getVersion()));
}
}
return valueObject;
} | static ims.core.vo.PatientDiagnosisListVo function(DomainObjectMap map, ims.core.vo.PatientDiagnosisListVo valueObject, ims.core.clinical.domain.objects.PatientDiagnosis domainObject) { if (null == domainObject) { return valueObject; } if (null == map) { map = new DomainObjectMap(); } valueObject.setID_PatientDiagnosis(domainObject.getId()); valueObject.setIsRIE(domainObject.getIsRIE()); if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord()) return null; if ((valueObject.getIsRIE() == null valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord()) return null; valueObject.setPrimaryForCareSpells(ims.core.vo.domain.CSPrimaryDiagnosisVoAssembler.createCSPrimaryDiagnosisVoCollectionFromCsPrimaryDiagnosis(map, domainObject.getPrimaryForCareSpells()) ); valueObject.setIsComplication( domainObject.isIsComplication() ); java.util.Date DateResolved = domainObject.getDateResolved(); if ( null != DateResolved ) { valueObject.setDateResolved(new ims.framework.utils.Date(DateResolved) ); } valueObject.setSysInfo(ims.vo.domain.SystemInformationAssembler.create(domainObject.getSystemInformation())); java.util.Date DateOnset = domainObject.getDateOnset(); if ( null != DateOnset ) { valueObject.setDateOnset(new ims.framework.utils.Date(DateOnset) ); } valueObject.setClinicalContact(ims.core.vo.domain.ClinicalContactShortVoAssembler.create(map, domainObject.getClinicalContact()) ); ims.domain.lookups.LookupInstance instance7 = domainObject.getDiagLaterality(); if ( null != instance7 ) { ims.framework.utils.ImagePath img = null; ims.framework.utils.Color color = null; img = null; if (instance7.getImage() != null) { img = new ims.framework.utils.ImagePath(instance7.getImage().getImageId(), instance7.getImage().getImagePath()); } color = instance7.getColor(); if (color != null) color.getValue(); ims.core.vo.lookups.LateralityLRB voLookup7 = new 
ims.core.vo.lookups.LateralityLRB(instance7.getId(),instance7.getText(), instance7.isActive(), null, img, color); ims.core.vo.lookups.LateralityLRB parentVoLookup7 = voLookup7; ims.domain.lookups.LookupInstance parent7 = instance7.getParent(); while (parent7 != null) { if (parent7.getImage() != null) { img = new ims.framework.utils.ImagePath(parent7.getImage().getImageId(), parent7.getImage().getImagePath() ); } else { img = null; } color = parent7.getColor(); if (color != null) color.getValue(); parentVoLookup7.setParent(new ims.core.vo.lookups.LateralityLRB(parent7.getId(),parent7.getText(), parent7.isActive(), null, img, color)); parentVoLookup7 = parentVoLookup7.getParent(); parent7 = parent7.getParent(); } valueObject.setDiagLaterality(voLookup7); } valueObject.setSiteText(domainObject.getSiteText()); valueObject.setIsPMH( domainObject.isIsPMH() ); valueObject.setIsCancerDiagnosis( domainObject.isIsCancerDiagnosis() ); valueObject.setAuthoringInfo(ims.core.vo.domain.AuthoringInformationVoAssembler.create(map, domainObject.getAuthoringInfo()) ); valueObject.setDiagnosisDescription(domainObject.getDiagnosisDescription()); Integer DiagnosedDate = domainObject.getDiagnosedDate(); if ( null != DiagnosedDate ) { valueObject.setDiagnosedDate(new ims.framework.utils.PartialDate(DiagnosedDate) ); } valueObject.setCurrentStatus(ims.clinical.vo.domain.PatientDiagnosisStatusVoAssembler.create(map, domainObject.getCurrentStatus()) ); ims.domain.lookups.LookupInstance instance15 = domainObject.getSourceofInformation(); if ( null != instance15 ) { ims.framework.utils.ImagePath img = null; ims.framework.utils.Color color = null; img = null; if (instance15.getImage() != null) { img = new ims.framework.utils.ImagePath(instance15.getImage().getImageId(), instance15.getImage().getImagePath()); } color = instance15.getColor(); if (color != null) color.getValue(); ims.core.vo.lookups.SourceofInformation voLookup15 = new 
ims.core.vo.lookups.SourceofInformation(instance15.getId(),instance15.getText(), instance15.isActive(), null, img, color); ims.core.vo.lookups.SourceofInformation parentVoLookup15 = voLookup15; ims.domain.lookups.LookupInstance parent15 = instance15.getParent(); while (parent15 != null) { if (parent15.getImage() != null) { img = new ims.framework.utils.ImagePath(parent15.getImage().getImageId(), parent15.getImage().getImagePath() ); } else { img = null; } color = parent15.getColor(); if (color != null) color.getValue(); parentVoLookup15.setParent(new ims.core.vo.lookups.SourceofInformation(parent15.getId(),parent15.getText(), parent15.isActive(), null, img, color)); parentVoLookup15 = parentVoLookup15.getParent(); parent15 = parent15.getParent(); } valueObject.setSourceofInformation(voLookup15); } valueObject.setDiagnosis(ims.core.vo.domain.DiagLiteVoAssembler.create(map, domainObject.getDiagnosis()) ); valueObject.setCareContext(ims.core.vo.domain.CareContextLiteVoAssembler.create(map, domainObject.getCareContext()) ); valueObject.setIsComorbidity( domainObject.isIsComorbidity() ); if (domainObject.getEpisodeOfCare() != null) { if(domainObject.getEpisodeOfCare() instanceof HibernateProxy) { HibernateProxy p = (HibernateProxy) domainObject.getEpisodeOfCare(); int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString()); valueObject.setEpisodeOfCare(new ims.core.admin.vo.EpisodeOfCareRefVo(id, -1)); } else { valueObject.setEpisodeOfCare(new ims.core.admin.vo.EpisodeOfCareRefVo(domainObject.getEpisodeOfCare().getId(), domainObject.getEpisodeOfCare().getVersion())); } } return valueObject; } | /**
* Update the ValueObject with the Domain Object.
* @param map DomainObjectMap of DomainObjects to already created ValueObjects.
* @param valueObject to be updated
* @param domainObject ims.core.clinical.domain.objects.PatientDiagnosis
*/ | Update the ValueObject with the Domain Object | insert | {
"repo_name": "open-health-hub/openMAXIMS",
"path": "openmaxims_workspace/ValueObjects/src/ims/core/vo/domain/PatientDiagnosisListVoAssembler.java",
"license": "agpl-3.0",
"size": 28037
} | [
"org.hibernate.proxy.HibernateProxy"
] | import org.hibernate.proxy.HibernateProxy; | import org.hibernate.proxy.*; | [
"org.hibernate.proxy"
] | org.hibernate.proxy; | 804,053 |
/**
 * Looks up measurement templates for a monitorable type, optionally
 * narrowed by category, with optional id-based exclusion and sort order.
 *
 * @param type       the monitorable type
 * @param cat        the category, or {@code null} for all categories
 * @param excludeIds template ids to filter out, or {@code null} for none
 * @param pc         paging/sorting control; defaults applied when null
 * @return the matching templates (an empty list if none were found)
 */
@Transactional(readOnly = true)
public List<MeasurementTemplate> findTemplates(String type, String cat, Integer[] excludeIds,
                                               PageControl pc) {
    List<MeasurementTemplate> templates;
    if (cat == null) {
        templates = measurementTemplateDAO.findTemplatesByMonitorableType(type);
    } else {
        templates = measurementTemplateDAO.findTemplatesByMonitorableTypeAndCategory(type, cat);
    }
    if (templates == null) {
        return new PageList<MeasurementTemplate>();
    }
    // Handle excludes
    List<MeasurementTemplate> includes;
    if (excludeIds == null) {
        includes = templates;
    } else {
        HashSet<Integer> excludes = new HashSet<Integer>(Arrays.asList(excludeIds));
        includes = new ArrayList<MeasurementTemplate>();
        for (MeasurementTemplate tmpl : templates) {
            if (!excludes.contains(tmpl.getId()))
                includes.add(tmpl);
        }
    }
    pc = PageControl.initDefaults(pc, -1);
    if (pc.getSortorder() == PageControl.SORT_DESC) {
        Collections.reverse(includes);
    }
    // BUG FIX: previously returned 'templates', silently discarding the
    // exclude filtering (the DESC reversal only took effect when excludeIds
    // was null, since 'includes' aliased 'templates' in that case).
    return includes;
} | @Transactional(readOnly = true) List<MeasurementTemplate> function(String type, String cat, Integer[] excludeIds, PageControl pc) { List<MeasurementTemplate> templates; if (cat == null) { templates = measurementTemplateDAO.findTemplatesByMonitorableType(type); } else { templates = measurementTemplateDAO.findTemplatesByMonitorableTypeAndCategory(type, cat); } if (templates == null) { return new PageList<MeasurementTemplate>(); } List<MeasurementTemplate> includes; if (excludeIds == null) { includes = templates; } else { HashSet<Integer> excludes = new HashSet<Integer>(Arrays.asList(excludeIds)); includes = new ArrayList<MeasurementTemplate>(); for (MeasurementTemplate tmpl : templates) { if (!excludes.contains(tmpl.getId())) includes.add(tmpl); } } pc = PageControl.initDefaults(pc, -1); if (pc.getSortorder() == PageControl.SORT_DESC) { Collections.reverse(includes); } return templates; } | /**
* Look up measurement templates for a monitorable type and category.
*
* @return the list of matching measurement templates
*/ | Look up a measurement templates for a monitorable type and category | findTemplates | {
"repo_name": "cc14514/hq6",
"path": "hq-server/src/main/java/org/hyperic/hq/measurement/server/session/TemplateManagerImpl.java",
"license": "unlicense",
"size": 18945
} | [
"java.util.ArrayList",
"java.util.Arrays",
"java.util.Collections",
"java.util.HashSet",
"java.util.List",
"org.hyperic.util.pager.PageControl",
"org.hyperic.util.pager.PageList",
"org.springframework.transaction.annotation.Transactional"
] | import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import org.hyperic.util.pager.PageControl; import org.hyperic.util.pager.PageList; import org.springframework.transaction.annotation.Transactional; | import java.util.*; import org.hyperic.util.pager.*; import org.springframework.transaction.annotation.*; | [
"java.util",
"org.hyperic.util",
"org.springframework.transaction"
] | java.util; org.hyperic.util; org.springframework.transaction; | 170,597 |
@Test
@WithSystemProperty(key = IGNITE_ENABLE_EXPERIMENTAL_COMMAND, value = "true")
public void testContainsNotExperimentalCmdInHelpOutputWhenEnableExperimentalTrue() {
    // With experimental commands enabled, the --help output must still list
    // the regular (non-experimental) commands; delegate to the shared check.
    checkContainsNotExperimentalCmdInHelpOutput();
} | @WithSystemProperty(key = IGNITE_ENABLE_EXPERIMENTAL_COMMAND, value = "true") void function() { checkContainsNotExperimentalCmdInHelpOutput(); } | /**
* Tests that when the --help control.sh command is executed, the output
* will contain non-experimental commands. In case system property
* {@link IgniteSystemProperties#IGNITE_ENABLE_EXPERIMENTAL_COMMAND} =
* {@code true}.
*/ | Test is that when the --help control.sh command is executed, output will contain non-experimental commands. In case system property <code>IgniteSystemProperties#IGNITE_ENABLE_EXPERIMENTAL_COMMAND</code> = true | testContainsNotExperimentalCmdInHelpOutputWhenEnableExperimentalTrue | {
"repo_name": "ascherbakoff/ignite",
"path": "modules/control-utility/src/test/java/org/apache/ignite/util/GridCommandHandlerClusterByClassTest.java",
"license": "apache-2.0",
"size": 62940
} | [
"org.apache.ignite.testframework.junits.WithSystemProperty"
] | import org.apache.ignite.testframework.junits.WithSystemProperty; | import org.apache.ignite.testframework.junits.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 998,549 |
TypeInfo getTypeInfo(); | TypeInfo getTypeInfo(); | /**
* Returns the type info.
* @return the type info
*/ | Returns the type info | getTypeInfo | {
"repo_name": "akirakw/asakusafw",
"path": "hive-project/core-stub/src/main/java/com/asakusafw/directio/hive/serde/ValueSerde.java",
"license": "apache-2.0",
"size": 1583
} | [
"org.apache.hadoop.hive.serde2.typeinfo.TypeInfo"
] | import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; | import org.apache.hadoop.hive.serde2.typeinfo.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 639,748 |
/**
 * Parses the next line of the reader as a {@code long}, with fixed
 * fallback values on failure.
 *
 * @param reader the input reader to read one line from
 * @return the parsed number; 100 if reading fails, 0 if the line is not numeric
 */
private static long readLong(BufferedReader reader) {
    try {
        String line = reader.readLine();
        // A null or non-numeric line raises NumberFormatException below.
        return Long.parseLong(line);
    } catch (IOException ioe) {
        // I/O failure: fall back to 100 (original behavior).
        return 100;
    } catch (NumberFormatException nfe) {
        // Non-numeric (or missing) input maps to 0.
        return 0;
    }
} | static long function(BufferedReader reader) { try { return Long.parseLong(reader.readLine()); } catch (IOException ioe) { return 100; } catch (NumberFormatException nfe) { return 0; } } | /**
* Reads a number from an input reader.
* @param reader The input reader from which to read a number.
* @return The number that was inputted.
*/ | Reads a number from the an input reader | readLong | {
"repo_name": "edvardvb/tdt4186-os",
"path": "os-oving/src/round_robin/Application.java",
"license": "mit",
"size": 2733
} | [
"java.io.BufferedReader",
"java.io.IOException"
] | import java.io.BufferedReader; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,850,740 |
/**
 * Default no-op implementation: returns the response packet unchanged via
 * {@code doReturnWith}.
 */
public @NotNull NextAction processResponse(Packet response) {
    return doReturnWith(response);
} | @NotNull NextAction function(Packet response) { return doReturnWith(response); } | /**
* Default no-op implementation.
*/ | Default no-op implementation | processResponse | {
"repo_name": "FauxFaux/jdk9-jaxws",
"path": "src/java.xml.ws/share/classes/com/sun/xml/internal/ws/api/pipe/helper/AbstractFilterTubeImpl.java",
"license": "gpl-2.0",
"size": 2820
} | [
"com.sun.istack.internal.NotNull",
"com.sun.xml.internal.ws.api.message.Packet",
"com.sun.xml.internal.ws.api.pipe.NextAction"
] | import com.sun.istack.internal.NotNull; import com.sun.xml.internal.ws.api.message.Packet; import com.sun.xml.internal.ws.api.pipe.NextAction; | import com.sun.istack.internal.*; import com.sun.xml.internal.ws.api.message.*; import com.sun.xml.internal.ws.api.pipe.*; | [
"com.sun.istack",
"com.sun.xml"
] | com.sun.istack; com.sun.xml; | 187,795 |
/**
 * Verifies that the "current" and/or "previous" storage directories exist
 * as indicated by the flags. When "previous" exists, its contents are
 * compared against the master checksum to prove it was not modified.
 *
 * @param nodeType            which node's storage layout to verify
 * @param baseDirs            the storage base directories to inspect
 * @param currentShouldExist  whether "current" is expected to exist
 * @param previousShouldExist whether "previous" is expected to exist
 * @throws IOException if checksumming the directory contents fails
 */
void checkResult(NodeType nodeType, String[] baseDirs,
                 boolean currentShouldExist, boolean previousShouldExist)
    throws IOException
{
    switch (nodeType) {
    case NAME_NODE:
        if (currentShouldExist) {
            // The name node keeps its metadata files directly under "current".
            for (String baseDir : baseDirs) {
                assertTrue(new File(baseDir, "current").isDirectory());
                assertTrue(new File(baseDir, "current/VERSION").isFile());
                assertTrue(new File(baseDir, "current/edits").isFile());
                assertTrue(new File(baseDir, "current/fsimage").isFile());
                assertTrue(new File(baseDir, "current/fstime").isFile());
            }
        }
        break;
    case DATA_NODE:
        if (currentShouldExist) {
            // Data node "current" contents are checked against the master checksum.
            for (String baseDir : baseDirs) {
                assertEquals(
                    UpgradeUtilities.checksumContents(
                        nodeType, new File(baseDir, "current")),
                    UpgradeUtilities.checksumMasterContents(nodeType));
            }
        }
        break;
    }
    if (previousShouldExist) {
        // "previous" must be present and byte-identical to the master copy.
        for (String baseDir : baseDirs) {
            assertTrue(new File(baseDir, "previous").isDirectory());
            assertEquals(
                UpgradeUtilities.checksumContents(
                    nodeType, new File(baseDir, "previous")),
                UpgradeUtilities.checksumMasterContents(nodeType));
        }
    }
} | void checkResult(NodeType nodeType, String[] baseDirs, boolean currentShouldExist, boolean previousShouldExist) throws IOException { switch (nodeType) { case NAME_NODE: if (currentShouldExist) { for (int i = 0; i < baseDirs.length; i++) { assertTrue(new File(baseDirs[i],STR).isDirectory()); assertTrue(new File(baseDirs[i],STR).isFile()); assertTrue(new File(baseDirs[i],STR).isFile()); assertTrue(new File(baseDirs[i],STR).isFile()); assertTrue(new File(baseDirs[i],STR).isFile()); } } break; case DATA_NODE: if (currentShouldExist) { for (int i = 0; i < baseDirs.length; i++) { assertEquals( UpgradeUtilities.checksumContents( nodeType, new File(baseDirs[i],STR)), UpgradeUtilities.checksumMasterContents(nodeType)); } } break; } if (previousShouldExist) { for (int i = 0; i < baseDirs.length; i++) { assertTrue(new File(baseDirs[i],STR).isDirectory()); assertEquals( UpgradeUtilities.checksumContents( nodeType, new File(baseDirs[i],STR)), UpgradeUtilities.checksumMasterContents(nodeType)); } } } | /**
* Verify that the current and/or previous exist as indicated by
* the method parameters. If previous exists, verify that
* it hasn't been modified by comparing the checksum of all it's
* containing files with their original checksum. It is assumed that
* the server has recovered.
*/ | Verify that the current and/or previous exist as indicated by the method parameters. If previous exists, verify that it hasn't been modified by comparing the checksum of all it's containing files with their original checksum. It is assumed that the server has recovered | checkResult | {
"repo_name": "Seagate/hadoop-on-lustre",
"path": "src/test/org/apache/hadoop/hdfs/TestDFSStorageStateRecovery.java",
"license": "apache-2.0",
"size": 12797
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.HdfsConstants"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.hdfs.server.common.HdfsConstants; | import java.io.*; import org.apache.hadoop.hdfs.server.common.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,698,651 |
/** Diffing after adding a single word must mark that word as an HTML addition. */
@Test
public void simpleTextAdd() throws Exception
{
    String oldText = "<p> This is a blue book</p>";
    String newText = "<p> This is a big blue book</p>";
    // Diff the two fragments and look for the "diff-html-added" marker span.
    String result = HtmlTestFixture.diff(oldText, newText);
    assertTrue("Expected an addition",result.indexOf("<p> This is a <span class=\"diff-html-added\"") > -1);
}
| void function() throws Exception { String oldText = STR; String newText = STR; String result = HtmlTestFixture.diff(oldText, newText); assertTrue(STR,result.indexOf(STRdiff-html-added\"") > -1); } | /**
* Adding a single word.
* @throws Exception something went wrong.
*/ | Adding a single word | simpleTextAdd | {
"repo_name": "jyd519/daisydiff",
"path": "src/test/java/org/outerj/daisy/diff/html/HTMLDifferTest.java",
"license": "apache-2.0",
"size": 3647
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 189,775 |
/**
 * Reads release entries from a change log XML document.
 *
 * @param xml  the pull parser positioned on the change log document
 * @param full if {@code true} the full change log is read; otherwise only
 *             the changes since the last saved version
 * @return a {@code SparseArray} mapping version codes to release information
 */
protected SparseArray<ReleaseItem> readChangeLog(XmlPullParser xml, boolean full) {
    SparseArray<ReleaseItem> result = new SparseArray<ReleaseItem>();
    try {
        int eventType = xml.getEventType();
        while (eventType != XmlPullParser.END_DOCUMENT) {
            if (eventType == XmlPullParser.START_TAG && xml.getName().equals(ReleaseTag.NAME)) {
                if (parseReleaseTag(xml, full, result)) {
                    // Stop reading more elements if this entry is not newer
                    // than the last version.
                    break;
                }
            }
            eventType = xml.next();
        }
    } catch (XmlPullParserException | IOException e) {
        // Both failure modes were handled identically; multi-catch removes the
        // duplicated block. Parsing errors yield the partial result.
        Log.e(LOG_TAG, e.getMessage(), e);
    }
    return result;
} | SparseArray<ReleaseItem> function(XmlPullParser xml, boolean full) { SparseArray<ReleaseItem> result = new SparseArray<ReleaseItem>(); try { int eventType = xml.getEventType(); while (eventType != XmlPullParser.END_DOCUMENT) { if (eventType == XmlPullParser.START_TAG && xml.getName().equals(ReleaseTag.NAME)) { if (parseReleaseTag(xml, full, result)) { break; } } eventType = xml.next(); } } catch (XmlPullParserException e) { Log.e(LOG_TAG, e.getMessage(), e); } catch (IOException e) { Log.e(LOG_TAG, e.getMessage(), e); } return result; } | /**
* Read the change log from an XML file.
*
* @param xml
* The {@code XmlPullParser} instance used to read the change log.
* @param full
* If {@code true} the full change log is read. Otherwise only the changes since the
* last (saved) version are read.
*
* @return A {@code SparseArray} mapping the version codes to release information.
*/ | Read the change log from an XML file | readChangeLog | {
"repo_name": "KBerstene/Subsonic",
"path": "app/src/main/java/github/daneren2005/dsub/view/ChangeLog.java",
"license": "gpl-3.0",
"size": 18475
} | [
"android.util.Log",
"android.util.SparseArray",
"java.io.IOException",
"org.xmlpull.v1.XmlPullParser",
"org.xmlpull.v1.XmlPullParserException"
] | import android.util.Log; import android.util.SparseArray; import java.io.IOException; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; | import android.util.*; import java.io.*; import org.xmlpull.v1.*; | [
"android.util",
"java.io",
"org.xmlpull.v1"
] | android.util; java.io; org.xmlpull.v1; | 505,708 |
/**
 * Tests buffer release behavior of {@code ResultPartition} for the given
 * partition type: after shrinking the pool, a partition without back
 * pressure releases excess buffers while a back-pressured one keeps them.
 *
 * @param resultPartitionType the result partition type to set up
 */
private void testReleaseMemory(final ResultPartitionType resultPartitionType) throws Exception {
    final int numAllBuffers = 10;
    final NettyShuffleEnvironment network = new NettyShuffleEnvironmentBuilder()
        .setNumNetworkBuffers(numAllBuffers).build();
    final ResultPartition resultPartition = createPartition(network, resultPartitionType, 1);
    try {
        resultPartition.setup();
        // take all buffers (more than the minimum required)
        for (int i = 0; i < numAllBuffers; ++i) {
            BufferBuilder bufferBuilder = resultPartition.getBufferPool().requestBufferBuilderBlocking();
            resultPartition.addBufferConsumer(bufferBuilder.createBufferConsumer(), 0);
        }
        resultPartition.finish();
        // All buffers are now in use by the partition.
        assertEquals(0, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments());
        // reset the pool size less than the number of requested buffers
        final int numLocalBuffers = 4;
        resultPartition.getBufferPool().setNumBuffers(numLocalBuffers);
        // partition with blocking type should release excess buffers
        if (!resultPartitionType.hasBackPressure()) {
            assertEquals(numLocalBuffers, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments());
        } else {
            assertEquals(0, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments());
        }
    } finally {
        // Always clean up the partition and environment, even on failure.
        resultPartition.release();
        network.close();
    }
} | void function(final ResultPartitionType resultPartitionType) throws Exception { final int numAllBuffers = 10; final NettyShuffleEnvironment network = new NettyShuffleEnvironmentBuilder() .setNumNetworkBuffers(numAllBuffers).build(); final ResultPartition resultPartition = createPartition(network, resultPartitionType, 1); try { resultPartition.setup(); for (int i = 0; i < numAllBuffers; ++i) { BufferBuilder bufferBuilder = resultPartition.getBufferPool().requestBufferBuilderBlocking(); resultPartition.addBufferConsumer(bufferBuilder.createBufferConsumer(), 0); } resultPartition.finish(); assertEquals(0, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments()); final int numLocalBuffers = 4; resultPartition.getBufferPool().setNumBuffers(numLocalBuffers); if (!resultPartitionType.hasBackPressure()) { assertEquals(numLocalBuffers, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments()); } else { assertEquals(0, resultPartition.getBufferPool().getNumberOfAvailableMemorySegments()); } } finally { resultPartition.release(); network.close(); } } | /**
* Tests {@link ResultPartition#releaseMemory(int)} on a working partition.
*
* @param resultPartitionType the result partition type to set up
*/ | Tests <code>ResultPartition#releaseMemory(int)</code> on a working partition | testReleaseMemory | {
"repo_name": "gyfora/flink",
"path": "flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/ResultPartitionTest.java",
"license": "apache-2.0",
"size": 15340
} | [
"org.apache.flink.runtime.io.network.NettyShuffleEnvironment",
"org.apache.flink.runtime.io.network.NettyShuffleEnvironmentBuilder",
"org.apache.flink.runtime.io.network.buffer.BufferBuilder",
"org.apache.flink.runtime.io.network.partition.PartitionTestUtils",
"org.junit.Assert"
] | import org.apache.flink.runtime.io.network.NettyShuffleEnvironment; import org.apache.flink.runtime.io.network.NettyShuffleEnvironmentBuilder; import org.apache.flink.runtime.io.network.buffer.BufferBuilder; import org.apache.flink.runtime.io.network.partition.PartitionTestUtils; import org.junit.Assert; | import org.apache.flink.runtime.io.network.*; import org.apache.flink.runtime.io.network.buffer.*; import org.apache.flink.runtime.io.network.partition.*; import org.junit.*; | [
"org.apache.flink",
"org.junit"
] | org.apache.flink; org.junit; | 849,977 |
/**
 * Starts the given Machine Learning datafeed asynchronously and notifies
 * the listener upon completion.
 *
 * @param request  the request to start the datafeed
 * @param options  additional request options (e.g. headers)
 * @param listener listener notified with the parsed response or a failure
 * @return a cancellable handle for the in-flight request
 */
public Cancellable startDatafeedAsync(StartDatafeedRequest request, RequestOptions options,
                                      ActionListener<StartDatafeedResponse> listener) {
    // Delegate to the generic async helper: convert the request to HTTP,
    // then parse the response entity into a StartDatafeedResponse.
    return restHighLevelClient.performRequestAsyncAndParseEntity(request,
        MLRequestConverters::startDatafeed,
        options,
        StartDatafeedResponse::fromXContent,
        listener,
        Collections.emptySet());
} | Cancellable function(StartDatafeedRequest request, RequestOptions options, ActionListener<StartDatafeedResponse> listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::startDatafeed, options, StartDatafeedResponse::fromXContent, listener, Collections.emptySet()); } | /**
* Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html">
* ML Start Datafeed documentation</a>
*
* @param request The request to start the datafeed
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/ | Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion For additional info see ML Start Datafeed documentation | startDatafeedAsync | {
"repo_name": "uschindler/elasticsearch",
"path": "client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java",
"license": "apache-2.0",
"size": 130429
} | [
"java.util.Collections",
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.client.ml.StartDatafeedRequest",
"org.elasticsearch.client.ml.StartDatafeedResponse"
] | import java.util.Collections; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.ml.StartDatafeedRequest; import org.elasticsearch.client.ml.StartDatafeedResponse; | import java.util.*; import org.elasticsearch.action.*; import org.elasticsearch.client.ml.*; | [
"java.util",
"org.elasticsearch.action",
"org.elasticsearch.client"
] | java.util; org.elasticsearch.action; org.elasticsearch.client; | 293,373 |
/**
 * Runs the given Hibernate query and returns its result set.
 *
 * @param hsql the hibernate query string
 * @return the query results
 * @throws SQLException if query execution fails
 */
public ResultSet query(String hsql)
    throws SQLException
{
    // Prepare the query and execute it immediately.
    return prepareQuery(hsql).executeQuery();
} | ResultSet function(String hsql) throws SQLException { AmberQuery query = prepareQuery(hsql); return query.executeQuery(); } | /**
* Select a list of objects with a Hibernate query.
*
* @param hsql the hibernate query
*
* @return the query results.
*/ | Select a list of objects with a Hibernate query | query | {
"repo_name": "dlitz/resin",
"path": "modules/resin/src/com/caucho/amber/manager/AmberConnection.java",
"license": "gpl-2.0",
"size": 89822
} | [
"com.caucho.amber.AmberQuery",
"java.sql.ResultSet",
"java.sql.SQLException"
] | import com.caucho.amber.AmberQuery; import java.sql.ResultSet; import java.sql.SQLException; | import com.caucho.amber.*; import java.sql.*; | [
"com.caucho.amber",
"java.sql"
] | com.caucho.amber; java.sql; | 2,310,812 |
/**
 * Executes the zone-unlink-banner XML-RPC method expecting it to fail, and
 * asserts that the reported error message equals the expected one.
 *
 * @param params   parameters for the tested method
 * @param errorMsg the expected error message
 * @throws MalformedURLException if the service URL is malformed
 */
private void executeUnlinkBannerWithError(Object[] params, String errorMsg)
    throws MalformedURLException {
    try {
        execute(ZONE_UNLINK_BANNER_METHOD, params);
        // Reaching this line means the call unexpectedly succeeded.
        fail(ErrorMessage.METHOD_EXECUTED_SUCCESSFULLY_BUT_SHOULD_NOT_HAVE);
    } catch (XmlRpcException e) {
        assertEquals(ErrorMessage.WRONG_ERROR_MESSAGE, errorMsg, e
            .getMessage());
    }
} | void function(Object[] params, String errorMsg) throws MalformedURLException { try { execute(ZONE_UNLINK_BANNER_METHOD, params); fail(ErrorMessage.METHOD_EXECUTED_SUCCESSFULLY_BUT_SHOULD_NOT_HAVE); } catch (XmlRpcException e) { assertEquals(ErrorMessage.WRONG_ERROR_MESSAGE, errorMsg, e .getMessage()); } } | /**
* Execute test method with error
*
* @param params -
* parameters for test method
* @param errorMsg -
* true error messages
* @throws MalformedURLException
*/ | Execute test method with error | executeUnlinkBannerWithError | {
"repo_name": "Mordred/revive-adserver",
"path": "www/api/v2/xmlrpc/tests/unit/src/test/java/org/openx/zone/TestZoneUnlinkBanner.java",
"license": "gpl-2.0",
"size": 4892
} | [
"java.net.MalformedURLException",
"org.apache.xmlrpc.XmlRpcException",
"org.openx.utils.ErrorMessage"
] | import java.net.MalformedURLException; import org.apache.xmlrpc.XmlRpcException; import org.openx.utils.ErrorMessage; | import java.net.*; import org.apache.xmlrpc.*; import org.openx.utils.*; | [
"java.net",
"org.apache.xmlrpc",
"org.openx.utils"
] | java.net; org.apache.xmlrpc; org.openx.utils; | 543,533 |
/**
 * Returns the file system handle held by this service.
 *
 * @return the file system
 */
public final FileSystem getFs() {
    return fs;
} | final FileSystem function() { return fs; } | /**
* Get the file system
*
* @return file system
*/ | Get the file system | getFs | {
"repo_name": "renato2099/giraph-gora",
"path": "giraph-core/src/main/java/org/apache/giraph/bsp/BspService.java",
"license": "apache-2.0",
"size": 40812
} | [
"org.apache.hadoop.fs.FileSystem"
] | import org.apache.hadoop.fs.FileSystem; | import org.apache.hadoop.fs.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,236,780 |
/**
 * Defines a catch parameter in the current scope. Each lhs name found in
 * the catch node gets its own typed slot (a destructuring catch declaration
 * can introduce several names).
 */
void defineCatch(Node n) {
    assertDefinitionNode(n, Token.CATCH);
    // Though almost certainly a terrible idea, it is possible to do destructuring in
    // the catch declaration.
    // e.g. `} catch ({message, errno}) {`
    for (Node catchName : NodeUtil.findLhsNodesInNode(n)) {
        // Use the JSDoc-declared type when present; a null type leaves the
        // slot open for later type inference.
        JSType type = getDeclaredType(catchName.getJSDocInfo(), catchName, null, null);
        new SlotDefiner()
            .forDeclarationNode(catchName)
            .forVariableName(catchName.getString())
            .inScope(currentScope)
            .withType(type)
            .allowLaterTypeInference(type == null)
            .defineSlot();
    }
} | void defineCatch(Node n) { assertDefinitionNode(n, Token.CATCH); for (Node catchName : NodeUtil.findLhsNodesInNode(n)) { JSType type = getDeclaredType(catchName.getJSDocInfo(), catchName, null, null); new SlotDefiner() .forDeclarationNode(catchName) .forVariableName(catchName.getString()) .inScope(currentScope) .withType(type) .allowLaterTypeInference(type == null) .defineSlot(); } } | /**
* Defines a catch parameter.
*/ | Defines a catch parameter | defineCatch | {
"repo_name": "tiobe/closure-compiler",
"path": "src/com/google/javascript/jscomp/TypedScopeCreator.java",
"license": "apache-2.0",
"size": 121532
} | [
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.Token",
"com.google.javascript.rhino.jstype.JSType"
] | import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.jstype.JSType; | import com.google.javascript.rhino.*; import com.google.javascript.rhino.jstype.*; | [
"com.google.javascript"
] | com.google.javascript; | 1,810,725 |
/**
 * Wraps the given callable so that it first acquires the requested number
 * of permits from the rate limiter before delegating.
 *
 * @param rateLimiter the RateLimiter to consult
 * @param permits     number of permits this call requires
 * @param callable    the original callable
 * @param <T>         the type of results supplied by the callable
 * @return a callable restricted by the RateLimiter
 */
static <T> Callable<T> decorateCallable(RateLimiter rateLimiter, int permits,
    Callable<T> callable) {
    return new Callable<T>() {
        @Override
        public T call() throws Exception {
            // Wait for the permits first, then run the wrapped callable.
            waitForPermission(rateLimiter, permits);
            return callable.call();
        }
    };
} | static <T> Callable<T> decorateCallable(RateLimiter rateLimiter, int permits, Callable<T> callable) { return () -> { waitForPermission(rateLimiter, permits); return callable.call(); }; } | /**
* Creates a callable which is restricted by a RateLimiter.
*
* @param rateLimiter the RateLimiter
* @param permits number of permits that this call requires
* @param callable the original callable
* @param <T> the type of results supplied by callable
* @return a callable which is restricted by a RateLimiter.
*/ | Creates a callable which is restricted by a RateLimiter | decorateCallable | {
"repo_name": "drmaas/resilience4j",
"path": "resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/RateLimiter.java",
"license": "apache-2.0",
"size": 33535
} | [
"java.util.concurrent.Callable"
] | import java.util.concurrent.Callable; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 2,770,055 |
/**
 * Collects every stored blob location in this container that resides on
 * the given server.
 *
 * @param server the server to scan
 * @return all blob locations on that server belonging to this container
 */
public List<StorageBlobLocation> getFilesOnServer(ObjectStorageServer server) {
    ArrayList<StorageBlobLocation> matches = new ArrayList<>();
    for (CdmiId objectId : physicalFileLocations.keySet()) {
        for (StorageBlobLocation candidate : physicalFileLocations.get(objectId)) {
            if (candidate.getServer().equals(server)) {
                matches.add(candidate);
            }
        }
    }
    return matches;
} | List<StorageBlobLocation> function(ObjectStorageServer server) { ArrayList<StorageBlobLocation> result = new ArrayList<>(); for (CdmiId o : physicalFileLocations.keySet()) { for (StorageBlobLocation location : physicalFileLocations.get(o)) { if (location.getServer().equals(server)) result.add(location); } } return result; } | /**
* Returns a list of blob locations that describe objects stored inside this container and placed on the given server
*
* @param server the server to scan
* @return all blobs that are on that server and in this container
*/ | Returns a lost of BlobLocators, that describe an object, that is stored inside this container and is put onto the given server | getFilesOnServer | {
"repo_name": "toebbel/StorageCloudSim",
"path": "src/edu/kit/cloudSimStorage/cdmi/CdmiObjectContainer.java",
"license": "gpl-3.0",
"size": 8857
} | [
"edu.kit.cloudSimStorage.storageModel.ObjectStorageServer",
"edu.kit.cloudSimStorage.storageModel.StorageBlobLocation",
"java.util.ArrayList",
"java.util.List"
] | import edu.kit.cloudSimStorage.storageModel.ObjectStorageServer; import edu.kit.cloudSimStorage.storageModel.StorageBlobLocation; import java.util.ArrayList; import java.util.List; | import edu.kit.*; import java.util.*; | [
"edu.kit",
"java.util"
] | edu.kit; java.util; | 2,805,629 |
/**
 * Tells whether {@code elements} is a {@link SortedSet} ordered by {@code comparator}.
 * A {@code null} comparator on the set is treated as {@code Ordering.natural()};
 * otherwise the comparators must compare equal via {@code equals}.
 */
static boolean hasSameComparator(
    Iterable<?> elements, Comparator<?> comparator) {
  if (!(elements instanceof SortedSet)) {
    return false;
  }
  Comparator<?> setComparator = ((SortedSet<?>) elements).comparator();
  if (setComparator == null) {
    // A SortedSet without an explicit comparator uses natural ordering.
    return comparator == Ordering.natural();
  }
  return comparator.equals(setComparator);
} | static boolean hasSameComparator( Iterable<?> elements, Comparator<?> comparator) { if (elements instanceof SortedSet) { SortedSet<?> sortedSet = (SortedSet<?>) elements; Comparator<?> comparator2 = sortedSet.comparator(); return (comparator2 == null) ? comparator == Ordering.natural() : comparator.equals(comparator2); } return false; } | /**
* Returns {@code true} if {@code elements} is a {@code SortedSet} that uses
* {@code comparator} to order its elements. Note that equivalent comparators
* may still return {@code false}, if {@code equals} doesn't consider them
* equal. If one comparator is {@code null} and the other is
* {@link Ordering#natural()}, this method returns {@code true}.
*/ | Returns true if elements is a SortedSet that uses comparator to order its elements. Note that equivalent comparators may still return false, if equals doesn't consider them equal. If one comparator is null and the other is <code>Ordering#natural()</code>, this method returns true | hasSameComparator | {
"repo_name": "rowboat/external-guava",
"path": "src/com/google/common/collect/ImmutableSortedSet.java",
"license": "apache-2.0",
"size": 26136
} | [
"java.util.Comparator",
"java.util.SortedSet"
] | import java.util.Comparator; import java.util.SortedSet; | import java.util.*; | [
"java.util"
] | java.util; | 1,517,892 |
/**
 * Forwards an Activity result into the sign-in flow. On RESULT_OK the
 * connection attempt is retried; any other response abandons the resolution.
 * Results for other request codes are ignored.
 */
public void onActivityResult(int requestCode, int responseCode, Intent intent) {
    if (requestCode != RC_RESOLVE) {
        return;
    }
    // Back from an activity launched to resolve a connection problem
    // (for example, the sign-in UI).
    mExpectingActivityResult = false;
    debugLog("onActivityResult, req " + requestCode + " response " + responseCode);
    if (responseCode == Activity.RESULT_OK) {
        // The problem was resolved; try to connect again.
        debugLog("responseCode == RESULT_OK. So connecting.");
        connectCurrentClient();
    } else {
        // The resolution failed, so give up and surface the error.
        debugLog("responseCode != RESULT_OK, so not reconnecting.");
        giveUp();
    }
} | void function(int requestCode, int responseCode, Intent intent) { if (requestCode == RC_RESOLVE) { mExpectingActivityResult = false; debugLog(STR + requestCode + STR + responseCode); if (responseCode == Activity.RESULT_OK) { debugLog(STR); connectCurrentClient(); } else { debugLog(STR); giveUp(); } } } | /**
* Handle activity result. Call this method from your Activity's onActivityResult
* callback. If the activity result pertains to the sign-in process,
* processes it appropriately.
*/ | Handle activity result. Call this method from your Activity's onActivityResult callback. If the activity result pertains to the sign-in process, processes it appropriately | onActivityResult | {
"repo_name": "coronalabs/plugins-source-gamenetwork-google",
"path": "android/src/CoronaProvider/gameNetwork/google/GameHelper.java",
"license": "mit",
"size": 16561
} | [
"android.app.Activity",
"android.content.Intent"
] | import android.app.Activity; import android.content.Intent; | import android.app.*; import android.content.*; | [
"android.app",
"android.content"
] | android.app; android.content; | 15,880 |
/**
 * Wraps an already-decoded bitmap as an image source.
 *
 * @param bitmap the bitmap to display; must not be null
 */
public static ImageSource bitmap(Bitmap bitmap) {
    if (bitmap != null) {
        return new ImageSource(bitmap, false);
    }
    throw new NullPointerException("Bitmap must not be null");
} | static ImageSource function(Bitmap bitmap) { if (bitmap == null) { throw new NullPointerException(STR); } return new ImageSource(bitmap, false); } | /**
* Provide a loaded bitmap for display.
* @param bitmap bitmap to be displayed.
*/ | Provide a loaded bitmap for display | bitmap | {
"repo_name": "feelfreelinux/WykopMobilny",
"path": "app/src/main/kotlin/io/github/feelfreelinux/wykopmobilny/ui/modules/photoview/ImageSource.java",
"license": "mit",
"size": 7194
} | [
"android.graphics.Bitmap"
] | import android.graphics.Bitmap; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 1,088,730 |
/** Returns the first sheet matching the criteria, or {@code null} when none matches. */
Sheet getSheet(); | Sheet getSheet(); | /**
* Returns first sheet matching the criteria or null.
* @return first sheet matching the criteria or null
*/ | Returns first sheet matching the criteria or null | getSheet | {
"repo_name": "MetadataRegistry/spreadsheet-builder",
"path": "spreadsheet-builder-api/src/main/java/org/modelcatalogue/spreadsheet/query/api/SpreadsheetCriteriaResult.java",
"license": "apache-2.0",
"size": 1369
} | [
"org.modelcatalogue.spreadsheet.api.Sheet"
] | import org.modelcatalogue.spreadsheet.api.Sheet; | import org.modelcatalogue.spreadsheet.api.*; | [
"org.modelcatalogue.spreadsheet"
] | org.modelcatalogue.spreadsheet; | 1,111,945 |
/** Sends the given back-pressure status to all downstream connections. */
void sendBackPressureStatus(BackPressureStatus bpStatus); | void sendBackPressureStatus(BackPressureStatus bpStatus); | /**
* Sends the back pressure metrics to all downstream connections.
*/ | Sends the back pressure metrics to all downstream connections | sendBackPressureStatus | {
"repo_name": "kishorvpatil/incubator-storm",
"path": "storm-client/src/jvm/org/apache/storm/messaging/IConnection.java",
"license": "apache-2.0",
"size": 1991
} | [
"org.apache.storm.messaging.netty.BackPressureStatus"
] | import org.apache.storm.messaging.netty.BackPressureStatus; | import org.apache.storm.messaging.netty.*; | [
"org.apache.storm"
] | org.apache.storm; | 2,021,198 |
/**
 * Matches the HTTP response body against the configured response-text
 * property: a value starting with '~' is treated as a DOTALL regular
 * expression, anything else as a plain substring; a missing property
 * always matches.
 *
 * @param config connection configuration holding the response-text property
 * @param response body of the HTTP response to check
 * @return whether the response satisfies the response-text property
 */
protected boolean checkResponseBody(ConnectionConfig config, String response) {
    final String expectedResponse = config.getKeyedString(PROPERTY_NAME_RESPONSE_TEXT, null);
    if (expectedResponse == null) {
        return true;
    }
    if (!expectedResponse.startsWith("~")) {
        return response.contains(expectedResponse);
    }
    // Leading '~' marks a regular expression; DOTALL lets '.' span newlines.
    final Pattern bodyPat = Pattern.compile(expectedResponse.substring(1), Pattern.DOTALL);
    return bodyPat.matcher(response).matches();
} | boolean function(ConnectionConfig config, String response) { String expectedResponse = config.getKeyedString(PROPERTY_NAME_RESPONSE_TEXT, null); if (expectedResponse == null) { return true; } if (expectedResponse.startsWith("~")) { Pattern bodyPat = Pattern.compile(expectedResponse.substring(1), Pattern.DOTALL); return bodyPat.matcher(response).matches(); } else { return response.contains(expectedResponse); } } | /**
* Checks the response body as a substring or regular expression match
* according to the leading-tilde convention
*
* @param config ConnectionConfig object from which response-text property is extracted
* @param response Body of HTTP response to check
* @return Whether the response matches the response-text property
*/ | Checks the response body as a substring or regular expression match according to the leading-tilde convention | checkResponseBody | {
"repo_name": "vishwaAbhinav/OpenNMS",
"path": "opennms-services/src/main/java/org/opennms/netmgt/capsd/plugins/HttpPlugin.java",
"license": "gpl-2.0",
"size": 12329
} | [
"java.util.regex.Pattern",
"org.opennms.netmgt.capsd.ConnectionConfig"
] | import java.util.regex.Pattern; import org.opennms.netmgt.capsd.ConnectionConfig; | import java.util.regex.*; import org.opennms.netmgt.capsd.*; | [
"java.util",
"org.opennms.netmgt"
] | java.util; org.opennms.netmgt; | 2,691,625 |
/**
 * Renames {@code from} to {@code to}.
 *
 * @return {@code true} if the file has been renamed, {@code false} otherwise
 */
public boolean rename(File from, File to); | boolean function(File from, File to); | /**
* Renames a file.
* @param from Source file
* @param to Target file
* @return {@code true} if the file has been renamed, {@code false} otherwise
*/ | Renames a file | rename | {
"repo_name": "CURocketry/Ground_Station_GUI",
"path": "src/org/openstreetmap/josm/tools/PlatformHook.java",
"license": "gpl-3.0",
"size": 3573
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 2,839,859 |
/**
 * Clamps {@code value} to be non-negative: positive values pass through,
 * everything else (including -0.0) becomes 0.0. NaN is rejected.
 */
static double ensureNonNegative(double value) {
  checkArgument(!isNaN(value));
  return (value > 0.0) ? value : 0.0;
}
private static final long ONE_BITS = doubleToRawLongBits(1.0); | static double ensureNonNegative(double value) { checkArgument(!isNaN(value)); if (value > 0.0) { return value; } else { return 0.0; } } private static final long ONE_BITS = doubleToRawLongBits(1.0); | /**
* Returns its argument if it is non-negative, zero if it is negative.
*/ | Returns its argument if it is non-negative, zero if it is negative | ensureNonNegative | {
"repo_name": "mike10004/appengine-imaging",
"path": "gaecompat-awt-imaging/src/common/com/gaecompat/repackaged/com/google/common/math/DoubleUtils.java",
"license": "apache-2.0",
"size": 5200
} | [
"com.gaecompat.repackaged.com.google.common.base.Preconditions",
"java.lang.Double"
] | import com.gaecompat.repackaged.com.google.common.base.Preconditions; import java.lang.Double; | import com.gaecompat.repackaged.com.google.common.base.*; import java.lang.*; | [
"com.gaecompat.repackaged",
"java.lang"
] | com.gaecompat.repackaged; java.lang; | 1,940,876 |
/**
 * Replaces the current contents with {@code num} freshly created blocks.
 *
 * @param num stuck size (number of blocks to create)
 * @throws BlockCreateException if no factory is available to create blocks
 */
public void initializeBlockSet(final int num) throws BlockCreateException {
    if (factory == null) {
        throw new BlockCreateException();
    }
    this.clear();
    for (int created = 0; created < num; created++) {
        addLast(factory.create());
    }
}
| void function(final int num) throws BlockCreateException { if (factory == null) { throw new BlockCreateException(); } else { this.clear(); for (int i = 0; i < num; i++) { addLast(factory.create()); } } } | /**
* Initialize blocks with stuck size {@code num} <br>
*
* @param num Stuck size
* @throws BlockCreateException Can not create block
*/ | Initialize blocks with stuck size num | initializeBlockSet | {
"repo_name": "aratakokubun/new_words",
"path": "src/com/kkbnart/wordis/game/board/NextBlocks.java",
"license": "mit",
"size": 3032
} | [
"com.kkbnart.wordis.exception.BlockCreateException"
] | import com.kkbnart.wordis.exception.BlockCreateException; | import com.kkbnart.wordis.exception.*; | [
"com.kkbnart.wordis"
] | com.kkbnart.wordis; | 1,599,092 |
/**
 * Counts how many records the consolidated balance query built from the
 * given keys would return.
 *
 * @param fieldValues map of values used as keys to build the query
 * @param encumbranceBalanceTypes encumbrance balance type codes
 * @return an Iterator over the count report rows
 */
@Override
public Iterator getConsolidatedBalanceRecordCount(Map fieldValues, Collection<String> encumbranceBalanceTypes) {
    // Fixed: the original trace message referred to getBalanceRecordCount().
    LOG.debug("getConsolidatedBalanceRecordCount() started");
    ReportQueryByCriteria query = this.getBalanceCountQuery(fieldValues, encumbranceBalanceTypes);
    return getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(query);
} | Iterator function(Map fieldValues, Collection<String> encumbranceBalanceTypes) { LOG.debug(STR); ReportQueryByCriteria query = this.getBalanceCountQuery(fieldValues, encumbranceBalanceTypes); return getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(query); } | /**
* Given a Map of keys to use as a query, if we performed that query as a consolidated query... how many records would we get
* back?
*
* @param fieldValues a Map of values to use as keys to build the query
* @return an Iterator of counts...
* @see org.kuali.kfs.gl.dataaccess.BalanceDao#getConsolidatedBalanceRecordCount(Map, List)
*/ | Given a Map of keys to use as a query, if we performed that query as a consolidated query... how many records would we get back | getConsolidatedBalanceRecordCount | {
"repo_name": "bhutchinson/kfs",
"path": "kfs-core/src/main/java/org/kuali/kfs/gl/dataaccess/impl/BalanceDaoOjb.java",
"license": "agpl-3.0",
"size": 46351
} | [
"java.util.Collection",
"java.util.Iterator",
"java.util.Map",
"org.apache.ojb.broker.query.ReportQueryByCriteria"
] | import java.util.Collection; import java.util.Iterator; import java.util.Map; import org.apache.ojb.broker.query.ReportQueryByCriteria; | import java.util.*; import org.apache.ojb.broker.query.*; | [
"java.util",
"org.apache.ojb"
] | java.util; org.apache.ojb; | 1,248,987 |
/**
 * Collects the ids of all timestamp tokens contained in the simple report.
 *
 * @return the list of timestamp id(s), possibly empty
 */
public List<String> getTimestampIdList() {
    final List<String> ids = new ArrayList<>();
    final List<XmlToken> tokens = wrapped.getSignatureOrTimestamp();
    if (tokens == null) {
        return ids;
    }
    // Keep only the timestamp tokens; signatures are skipped.
    for (XmlToken candidate : tokens) {
        if (candidate instanceof XmlTimestamp) {
            ids.add(candidate.getId());
        }
    }
    return ids;
} | List<String> function() { final List<String> timestampIdList = new ArrayList<>(); List<XmlToken> tokens = wrapped.getSignatureOrTimestamp(); if (tokens != null) { for (XmlToken token : tokens) { if (token instanceof XmlTimestamp) { timestampIdList.add(token.getId()); } } } return timestampIdList; } | /**
* This method retrieves the timestamp ids
*
* @return the {@code List} of timestamp id(s) contained in the simpleReport
*/ | This method retrieves the timestamp ids | getTimestampIdList | {
"repo_name": "openlimit-signcubes/dss",
"path": "dss-simple-report-jaxb/src/main/java/eu/europa/esig/dss/simplereport/SimpleReport.java",
"license": "lgpl-2.1",
"size": 12053
} | [
"eu.europa.esig.dss.simplereport.jaxb.XmlTimestamp",
"eu.europa.esig.dss.simplereport.jaxb.XmlToken",
"java.util.ArrayList",
"java.util.List"
] | import eu.europa.esig.dss.simplereport.jaxb.XmlTimestamp; import eu.europa.esig.dss.simplereport.jaxb.XmlToken; import java.util.ArrayList; import java.util.List; | import eu.europa.esig.dss.simplereport.jaxb.*; import java.util.*; | [
"eu.europa.esig",
"java.util"
] | eu.europa.esig; java.util; | 706,347 |
/**
 * Lazily creates (on first call) and caches the item-provider adapter for
 * RuleMediatorInputConnector model objects. EMF-generated code — do not
 * hand-edit the logic.
 */
@Override
public Adapter createRuleMediatorInputConnectorAdapter() {
// Create the provider on demand and reuse it for subsequent calls.
if (ruleMediatorInputConnectorItemProvider == null) {
ruleMediatorInputConnectorItemProvider = new RuleMediatorInputConnectorItemProvider(this);
}
return ruleMediatorInputConnectorItemProvider;
}
protected RuleMediatorOutputConnectorItemProvider ruleMediatorOutputConnectorItemProvider; | Adapter function() { if (ruleMediatorInputConnectorItemProvider == null) { ruleMediatorInputConnectorItemProvider = new RuleMediatorInputConnectorItemProvider(this); } return ruleMediatorInputConnectorItemProvider; } protected RuleMediatorOutputConnectorItemProvider ruleMediatorOutputConnectorItemProvider; | /**
* This creates an adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.RuleMediatorInputConnector}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This creates an adapter for a <code>org.wso2.developerstudio.eclipse.gmf.esb.RuleMediatorInputConnector</code>. | createRuleMediatorInputConnectorAdapter | {
"repo_name": "nwnpallewela/devstudio-tooling-esb",
"path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/EsbItemProviderAdapterFactory.java",
"license": "apache-2.0",
"size": 304469
} | [
"org.eclipse.emf.common.notify.Adapter"
] | import org.eclipse.emf.common.notify.Adapter; | import org.eclipse.emf.common.notify.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,044,885 |
/**
 * Begins a mark-and-sweep pass: records the sweep start time and clears the
 * marked flag. A channel still unmarked when the sweep ends is considered
 * hung up.
 */
void startSweep()
{
    // System.currentTimeMillis() yields the same epoch value as
    // new Date().getTime() without allocating a Date object.
    this._sweepStartTime = System.currentTimeMillis();
    this._marked = false;
}
| void startSweep() { this._sweepStartTime = new Date().getTime(); this._marked = false; } | /**
* Used to start the Mark and Sweep process by setting the marked status to
* false. At the end of the sweep the marked flag should have been set to
* true. If not then this channel has been hungup.
*/ | Used to start the Mark and Sweep process by setting the marked status to false. At the end of the sweep the marked flag should have been set to true. If not then this channel has been hungup | startSweep | {
"repo_name": "scgm11/asterisk-java",
"path": "src/main/java/org/asteriskjava/pbx/internal/core/ChannelImpl.java",
"license": "apache-2.0",
"size": 27889
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 2,762,961 |
/**
 * Looks up the position of the given Component/{@code T} pair in this list.
 *
 * @param pair a Component - {@code T} pair
 * @return its index, or -1 when absent (delegates to the backing list)
 */
public int indexOf(Pair<Component, T> pair) {
    return listElements.indexOf(new ListElement(pair, false));
} | int function(Pair<Component, T> pair) { ListElement e = new ListElement(pair, false); return listElements.indexOf(e); } | /**
* Returns the index of the Component - {@code T} pair in this list.
*
* @param pair A Component - {@code T} pair.
* @return The index of the pair.
*/ | Returns the index of the Component - T pair in this list | indexOf | {
"repo_name": "tilastokeskus/Minotaurus",
"path": "Minotaurus/src/main/java/com/github/tilastokeskus/minotaurus/ui/ComponentList.java",
"license": "mit",
"size": 7696
} | [
"com.github.tilastokeskus.minotaurus.util.Pair",
"java.awt.Component"
] | import com.github.tilastokeskus.minotaurus.util.Pair; import java.awt.Component; | import com.github.tilastokeskus.minotaurus.util.*; import java.awt.*; | [
"com.github.tilastokeskus",
"java.awt"
] | com.github.tilastokeskus; java.awt; | 1,698,371 |
/**
 * Loads the locator constraints from the test.locatorConstraints entry of
 * the test configuration file, tolerating a missing or malformed entry.
 */
private static void initLocatorConstraints() {
    Configuration config = harnessConfig.configuration;
    try {
        testLocatorConstraints = (MethodConstraints) config.getEntry(
                "test", "locatorConstraints", MethodConstraints.class);
    } catch (ConfigurationException ex) {
        // Only warn for real QA configurations; other configurations may
        // legitimately omit this entry.
        if (config instanceof QAConfiguration) {
            logger.log(Level.INFO,
                       "Warning: couldn't init locator constraints",
                       ex);
        }
    }
} | static void function() { Configuration c = harnessConfig.configuration; try { testLocatorConstraints = (MethodConstraints) c.getEntry("test", STR, MethodConstraints.class); } catch (ConfigurationException e) { if (c instanceof QAConfiguration) { logger.log(Level.INFO, STR, e); } } } | /**
* Internal utility method to initialize the locator constraints from
* the <code>test.locatorConstraints</code> entry of the test
* configuration file.
*/ | Internal utility method to intialize the locator constraints from the <code>test.locatorConstraints</code> entry of the test configuration file | initLocatorConstraints | {
"repo_name": "pfirmstone/river-internet",
"path": "qa/src/org/apache/river/qa/harness/QAConfig.java",
"license": "apache-2.0",
"size": 110034
} | [
"java.util.logging.Level",
"net.jini.config.Configuration",
"net.jini.config.ConfigurationException",
"net.jini.core.constraint.MethodConstraints"
] | import java.util.logging.Level; import net.jini.config.Configuration; import net.jini.config.ConfigurationException; import net.jini.core.constraint.MethodConstraints; | import java.util.logging.*; import net.jini.config.*; import net.jini.core.constraint.*; | [
"java.util",
"net.jini.config",
"net.jini.core"
] | java.util; net.jini.config; net.jini.core; | 1,650,278 |
/**
 * Calculates magical attack status and damage for the main hand and, for a
 * player attacker wielding an off-hand weapon, the off hand as well.
 *
 * @param attacker the attacking creature
 * @param attacked the target creature
 * @param elem element of the attack
 * @return one AttackResult per striking hand
 */
public static List<AttackResult> calculateMagicalAttackResult(Creature attacker, Creature attacked, SkillElement elem) {
    List<AttackResult> attackList = new ArrayList<AttackResult>();
    int damage = StatFunctions.calculateAttackDamage(attacker, attacked, true, elem);
    AttackStatus status = calculateMagicalStatus(attacker, attacked, 100, false);
    if (status == AttackStatus.CRITICAL) {
        damage = (int) calculateWeaponCritical(attacked, damage, ((Player) attacker).getEquipment().getMainHandWeaponType(), StatEnum.MAGICAL_CRITICAL_DAMAGE_REDUCE);
    }
    damage = (int) StatFunctions.adjustDamages(attacker, attacked, damage, 0, false);
    if (damage <= 0) {
        damage = 1;
    }
    // A resisted attack deals no damage regardless of the rolled value.
    switch (status) {
        case RESIST:
        case CRITICAL_RESIST:
            damage = 0;
            break;
        default:
            break;
    }
    attackList.add(new AttackResult(damage, status));
    if (attacker instanceof Player && ((Player) attacker).getEquipment().getOffHandWeaponType() != null) {
        int offHandDamage = StatFunctions.calculateAttackDamage(attacker, attacked, false, elem);
        AttackStatus offHandStatus = calculateMagicalStatus(attacker, attacked, 100, false);
        if (offHandStatus == AttackStatus.CRITICAL) {
            // Bug fix: the original reused the main-hand damage and
            // main-hand weapon type in the off-hand critical calculation.
            offHandDamage = (int) calculateWeaponCritical(attacked, offHandDamage, ((Player) attacker).getEquipment().getOffHandWeaponType(), StatEnum.MAGICAL_CRITICAL_DAMAGE_REDUCE);
        }
        // Bug fix: adjust the off-hand damage, not the main-hand damage.
        offHandDamage = (int) StatFunctions.adjustDamages(attacker, attacked, offHandDamage, 0, false);
        if (offHandDamage <= 0) {
            offHandDamage = 1;
        }
        switch (offHandStatus) {
            case RESIST:
            case CRITICAL_RESIST:
                offHandDamage = 0;
                break;
            default:
                break;
        }
        // Bug fix: record the off-hand status with the off-hand result
        // (the original stored the main-hand status here).
        attackList.add(new AttackResult(offHandDamage, offHandStatus));
    }
    attacked.getObserveController().checkShieldStatus(attackList, null, attacker);
    return attackList;
}
| static List<AttackResult> function(Creature attacker, Creature attacked, SkillElement elem) { List<AttackResult> attackList = new ArrayList<AttackResult>(); int damage = StatFunctions.calculateAttackDamage(attacker, attacked, true, elem); AttackStatus status = calculateMagicalStatus(attacker, attacked, 100, false); if (status == AttackStatus.CRITICAL) { damage = (int) calculateWeaponCritical(attacked, damage, ((Player) attacker).getEquipment().getMainHandWeaponType(), StatEnum.MAGICAL_CRITICAL_DAMAGE_REDUCE); } damage = (int) StatFunctions.adjustDamages(attacker, attacked, damage, 0, false); if (damage <= 0) { damage = 1; } switch (status) { case RESIST: case CRITICAL_RESIST: damage = 0; break; default: break; } attackList.add(new AttackResult(damage, status)); if (attacker instanceof Player && ((Player) attacker).getEquipment().getOffHandWeaponType() != null) { int offHandDamage = StatFunctions.calculateAttackDamage(attacker, attacked, false, elem); AttackStatus offHandStatus = calculateMagicalStatus(attacker, attacked, 100, false); if (offHandStatus == AttackStatus.CRITICAL) { offHandDamage = (int) calculateWeaponCritical(attacked, damage, ((Player) attacker).getEquipment().getMainHandWeaponType(), StatEnum.MAGICAL_CRITICAL_DAMAGE_REDUCE); } offHandDamage = (int) StatFunctions.adjustDamages(attacker, attacked, damage, 0, false); if (offHandDamage <= 0) { offHandDamage = 1; } switch (offHandStatus) { case RESIST: case CRITICAL_RESIST: offHandDamage = 0; break; default: break; } attackList.add(new AttackResult(offHandDamage, status)); } attacked.getObserveController().checkShieldStatus(attackList, null, attacker); return attackList; } | /**
* Calculate Magical attack status and damage
*/ | Calculate Magical attack status and damage | calculateMagicalAttackResult | {
"repo_name": "Estada1401/anuwhscript",
"path": "GameServer/src/com/aionemu/gameserver/controllers/attack/AttackUtil.java",
"license": "gpl-3.0",
"size": 37122
} | [
"com.aionemu.gameserver.model.SkillElement",
"com.aionemu.gameserver.model.gameobjects.Creature",
"com.aionemu.gameserver.model.gameobjects.player.Player",
"com.aionemu.gameserver.model.stats.container.StatEnum",
"com.aionemu.gameserver.utils.stats.StatFunctions",
"java.util.ArrayList",
"java.util.List"
] | import com.aionemu.gameserver.model.SkillElement; import com.aionemu.gameserver.model.gameobjects.Creature; import com.aionemu.gameserver.model.gameobjects.player.Player; import com.aionemu.gameserver.model.stats.container.StatEnum; import com.aionemu.gameserver.utils.stats.StatFunctions; import java.util.ArrayList; import java.util.List; | import com.aionemu.gameserver.model.*; import com.aionemu.gameserver.model.gameobjects.*; import com.aionemu.gameserver.model.gameobjects.player.*; import com.aionemu.gameserver.model.stats.container.*; import com.aionemu.gameserver.utils.stats.*; import java.util.*; | [
"com.aionemu.gameserver",
"java.util"
] | com.aionemu.gameserver; java.util; | 178,934 |
/**
 * Restricts this check to core-style databases by removing the database
 * types it should not apply to.
 */
public void types() {
    final DatabaseType[] excluded = {
        DatabaseType.OTHERFEATURES, DatabaseType.ESTGENE, DatabaseType.VEGA, DatabaseType.RNASEQ
    };
    for (DatabaseType type : excluded) {
        removeAppliesToType(type);
    }
} | void function() { removeAppliesToType(DatabaseType.OTHERFEATURES); removeAppliesToType(DatabaseType.ESTGENE); removeAppliesToType(DatabaseType.VEGA); removeAppliesToType(DatabaseType.RNASEQ); } | /**
* This only really applies to core databases
*/ | This only really applies to core databases | types | {
"repo_name": "dbolser-ebi/ensj-healthcheck",
"path": "src/org/ensembl/healthcheck/testcase/generic/BigGeneExon.java",
"license": "apache-2.0",
"size": 6353
} | [
"org.ensembl.healthcheck.DatabaseType"
] | import org.ensembl.healthcheck.DatabaseType; | import org.ensembl.healthcheck.*; | [
"org.ensembl.healthcheck"
] | org.ensembl.healthcheck; | 1,440,923 |
/**
 * Verifies that an expired client heartbeat (client timeout) makes the
 * daemon interrupt the in-flight build and fail the command with an
 * InterruptedException reported on stderr.
 */
@Test
public void whenClientTimeoutDetectedThenBuildIsInterrupted()
throws InterruptedException, IOException {
// Sub process interruption not supported on Windows.
assumeTrue(Platform.detect() != Platform.WINDOWS);
final long timeoutMillis = 100;
final long intervalMillis = timeoutMillis * 2; // Interval > timeout to trigger disconnection.
final ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "exclusive_execution", tmp);
workspace.setUp();
// Build an NGContext connected to an NGInputStream reading from stream that will timeout.
try (TestContext context =
new TestContext(
ImmutableMap.copyOf(System.getenv()),
TestContext.createHeartBeatStream(intervalMillis),
timeoutMillis)) {
ProcessResult result = workspace.runBuckdCommand(context, "build", "//:sleep");
// The timed-out build must fail and surface the interruption.
result.assertFailure();
assertThat(result.getStderr(), containsString("InterruptedException"));
}
} | void function() throws InterruptedException, IOException { assumeTrue(Platform.detect() != Platform.WINDOWS); final long timeoutMillis = 100; final long intervalMillis = timeoutMillis * 2; final ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, STR, tmp); workspace.setUp(); try (TestContext context = new TestContext( ImmutableMap.copyOf(System.getenv()), TestContext.createHeartBeatStream(intervalMillis), timeoutMillis)) { ProcessResult result = workspace.runBuckdCommand(context, "build", STRInterruptedException")); } } | /**
* This verifies that a client timeout will be detected by a Nailgun NGInputStream reading from an
* empty heartbeat stream and that the generated InterruptedException will cause command execution
* to fail after timeout.
*/ | This verifies that a client timeout will be detected by a Nailgun NGInputStream reading from an empty heartbeat stream and that the generated InterruptedException will cause command execution to fail after timeout | whenClientTimeoutDetectedThenBuildIsInterrupted | {
"repo_name": "ilya-klyuchnikov/buck",
"path": "test/com/facebook/buck/cli/DaemonIntegrationTest.java",
"license": "apache-2.0",
"size": 25671
} | [
"com.facebook.buck.testutil.ProcessResult",
"com.facebook.buck.testutil.integration.ProjectWorkspace",
"com.facebook.buck.testutil.integration.TestContext",
"com.facebook.buck.testutil.integration.TestDataHelper",
"com.facebook.buck.util.environment.Platform",
"com.google.common.collect.ImmutableMap",
"java.io.IOException",
"org.junit.Assume"
] | import com.facebook.buck.testutil.ProcessResult; import com.facebook.buck.testutil.integration.ProjectWorkspace; import com.facebook.buck.testutil.integration.TestContext; import com.facebook.buck.testutil.integration.TestDataHelper; import com.facebook.buck.util.environment.Platform; import com.google.common.collect.ImmutableMap; import java.io.IOException; import org.junit.Assume; | import com.facebook.buck.testutil.*; import com.facebook.buck.testutil.integration.*; import com.facebook.buck.util.environment.*; import com.google.common.collect.*; import java.io.*; import org.junit.*; | [
"com.facebook.buck",
"com.google.common",
"java.io",
"org.junit"
] | com.facebook.buck; com.google.common; java.io; org.junit; | 1,922,393 |
/**
 * Builds a MultiCDSAnalytic covering several maturities, combining the dates
 * supplied here with the conventions configured on this factory.
 */
public MultiCDSAnalytic makeMultiCDS(final LocalDate tradeDate, final LocalDate stepinDate, final LocalDate cashSettlementDate, final LocalDate accStartDate,
final LocalDate maturityReferanceDate,
final int[] maturityIndexes) {
// Forward the factory's configured conventions (coupon interval, stub type,
// protect-start flag, recovery rate, calendar and day counts).
return new MultiCDSAnalytic(tradeDate, stepinDate, cashSettlementDate, accStartDate, maturityReferanceDate, maturityIndexes, _payAccOnDefault,
_couponIntervalTenor, _stubType, _protectStart,
_recoveryRate, _businessdayAdjustmentConvention, DEFAULT_CALENDAR, _accrualDayCount, _curveDayCount);
} | MultiCDSAnalytic function(final LocalDate tradeDate, final LocalDate stepinDate, final LocalDate cashSettlementDate, final LocalDate accStartDate, final LocalDate maturityReferanceDate, final int[] maturityIndexes) { return new MultiCDSAnalytic(tradeDate, stepinDate, cashSettlementDate, accStartDate, maturityReferanceDate, maturityIndexes, _payAccOnDefault, _couponIntervalTenor, _stubType, _protectStart, _recoveryRate, _businessdayAdjustmentConvention, DEFAULT_CALENDAR, _accrualDayCount, _curveDayCount); } | /**
* Make a set of CDS represented as a MultiCDSAnalytic instance.
*
* @param tradeDate
* The trade date
* @param stepinDate
* (aka Protection Effective date or assignment date). Date when party assumes ownership. This is usually T+1. This is when protection (and risk)
* starts in terms of the model. Note, this is sometimes just called the Effective Date, however this can cause confusion with the legal effective
* date which is T-60 or T-90.
* @param cashSettlementDate
* The valuation date. The date that values are PVed to. It is normally today + 3 business days. Aka cash-settle date.
* @param accStartDate
* This is when the CDS nominally starts in terms of premium payments. i.e. the number of days in the first period (and thus the amount of the first
* premium payment) is counted from this date.
* @param maturityReferanceDate
* A reference date that maturities are measured from. For standard CDSSs, this is the next IMM date after the trade date, so the actually maturities
* will be some fixed periods after this.
* @param maturityIndexes
* The maturities are fixed integer multiples of the payment interval, so for 6M, 1Y and 2Y tenors with a 3M payment interval, would require 2, 4,
* and 8 as the indices
* @return A set of CDS represented as a MultiCDSAnalytic
*/ | Make a set of CDS represented as a MultiCDSAnalytic instance | makeMultiCDS | {
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/credit/isdastandardmodel/CDSAnalyticFactory.java",
"license": "apache-2.0",
"size": 43176
} | [
"org.threeten.bp.LocalDate"
] | import org.threeten.bp.LocalDate; | import org.threeten.bp.*; | [
"org.threeten.bp"
] | org.threeten.bp; | 576,915 |
/**
 * PACE Step 3 (map nonce): sends the terminal's public mapping key to the
 * card via GENERAL AUTHENTICATE, validates the card's reply and derives the
 * mapped domain parameters, then continues with the key agreement step.
 * Only generic mapping is implemented; integrated mapping throws.
 *
 * @throws Exception on transmission errors or PACE security violations
 */
private void generalAuthenticateMapNonce() throws Exception {
byte[] pkMapPCD = null;
PACEMapping mapping = cryptoSuite.getMapping();
// Encode the terminal's public mapping key (generic mapping only).
if (mapping instanceof PACEGenericMapping) {
PACEGenericMapping gm = (PACEGenericMapping) mapping;
pkMapPCD = gm.getMappingKey().getEncodedPublicKey();
} else if (mapping instanceof PACEIntegratedMapping) {
throw new UnsupportedOperationException("Not implemented yet.");
}
// GENERAL AUTHENTICATE with tag 0x81 carries the mapping data; command chaining is set.
CardCommandAPDU gaMapNonce = new GeneralAuthenticate((byte) 0x81, pkMapPCD);
gaMapNonce.setChaining();
try {
response = gaMapNonce.transmit(dispatcher, slotHandle);
} catch (APDUException e) {
logger.error(e.getMessage(), e);
throw new ProtocolException(e.getResult());
}
if (mapping instanceof PACEGenericMapping) {
PACEGenericMapping gm = (PACEGenericMapping) mapping;
// Decode the card's public mapping key from the response.
PACEKey keyMapPICC = new PACEKey(domainParameter);
keyMapPICC.decodePublicKey(response.getData());
byte[] pkMapPICC = keyMapPICC.getEncodedPublicKey();
// Equal keys on both sides indicate an attack, so abort.
if (ByteUtils.compare(pkMapPICC, pkMapPCD)) {
throw new GeneralSecurityException("PACE security violation: equal keys");
}
// Derive the mapped domain parameters from the card's key and the nonce s.
domainParameter = gm.map(pkMapPICC, s);
} else if (mapping instanceof PACEIntegratedMapping) {
throw new UnsupportedOperationException("Not implemented yet.");
}
// Continue with Step 4
generalAuthenticateKeyAgreement();
} | void function() throws Exception { byte[] pkMapPCD = null; PACEMapping mapping = cryptoSuite.getMapping(); if (mapping instanceof PACEGenericMapping) { PACEGenericMapping gm = (PACEGenericMapping) mapping; pkMapPCD = gm.getMappingKey().getEncodedPublicKey(); } else if (mapping instanceof PACEIntegratedMapping) { throw new UnsupportedOperationException(STR); } CardCommandAPDU gaMapNonce = new GeneralAuthenticate((byte) 0x81, pkMapPCD); gaMapNonce.setChaining(); try { response = gaMapNonce.transmit(dispatcher, slotHandle); } catch (APDUException e) { logger.error(e.getMessage(), e); throw new ProtocolException(e.getResult()); } if (mapping instanceof PACEGenericMapping) { PACEGenericMapping gm = (PACEGenericMapping) mapping; PACEKey keyMapPICC = new PACEKey(domainParameter); keyMapPICC.decodePublicKey(response.getData()); byte[] pkMapPICC = keyMapPICC.getEncodedPublicKey(); if (ByteUtils.compare(pkMapPICC, pkMapPCD)) { throw new GeneralSecurityException(STR); } domainParameter = gm.map(pkMapPICC, s); } else if (mapping instanceof PACEIntegratedMapping) { throw new UnsupportedOperationException(STR); } generalAuthenticateKeyAgreement(); } | /**
* Step 3: Mapping nonce
*/ | Step 3: Mapping nonce | generalAuthenticateMapNonce | {
"repo_name": "adelapie/open-ecard-IRMA",
"path": "ifd/ifd-protocols/pace/src/main/java/org/openecard/ifd/protocol/pace/PACEImplementation.java",
"license": "apache-2.0",
"size": 12891
} | [
"java.security.GeneralSecurityException",
"org.openecard.common.apdu.GeneralAuthenticate",
"org.openecard.common.apdu.common.CardCommandAPDU",
"org.openecard.common.apdu.exception.APDUException",
"org.openecard.common.ifd.protocol.exception.ProtocolException",
"org.openecard.common.util.ByteUtils",
"org.openecard.ifd.protocol.pace.crypto.PACEGenericMapping",
"org.openecard.ifd.protocol.pace.crypto.PACEIntegratedMapping",
"org.openecard.ifd.protocol.pace.crypto.PACEKey",
"org.openecard.ifd.protocol.pace.crypto.PACEMapping"
] | import java.security.GeneralSecurityException; import org.openecard.common.apdu.GeneralAuthenticate; import org.openecard.common.apdu.common.CardCommandAPDU; import org.openecard.common.apdu.exception.APDUException; import org.openecard.common.ifd.protocol.exception.ProtocolException; import org.openecard.common.util.ByteUtils; import org.openecard.ifd.protocol.pace.crypto.PACEGenericMapping; import org.openecard.ifd.protocol.pace.crypto.PACEIntegratedMapping; import org.openecard.ifd.protocol.pace.crypto.PACEKey; import org.openecard.ifd.protocol.pace.crypto.PACEMapping; | import java.security.*; import org.openecard.common.apdu.*; import org.openecard.common.apdu.common.*; import org.openecard.common.apdu.exception.*; import org.openecard.common.ifd.protocol.exception.*; import org.openecard.common.util.*; import org.openecard.ifd.protocol.pace.crypto.*; | [
"java.security",
"org.openecard.common",
"org.openecard.ifd"
] | java.security; org.openecard.common; org.openecard.ifd; | 387,474 |
private static boolean instanceHasAllVolumesWithState(InstanceEbsVolumes instanceEbsVolumes,
VolumeState expectedState) {
int volumeCount = instanceEbsVolumes.getVolumeStates().size();
Set<String> volumesWithExpectedState =
getAllVolumeIdsWithState(Collections.singletonList(instanceEbsVolumes), expectedState);
return volumeCount == volumesWithExpectedState.size();
} | static boolean function(InstanceEbsVolumes instanceEbsVolumes, VolumeState expectedState) { int volumeCount = instanceEbsVolumes.getVolumeStates().size(); Set<String> volumesWithExpectedState = getAllVolumeIdsWithState(Collections.singletonList(instanceEbsVolumes), expectedState); return volumeCount == volumesWithExpectedState.size(); } | /**
* Returns true if all volumes for an instance has the expected status.
*/ | Returns true if all volumes for an instance has the expected status | instanceHasAllVolumesWithState | {
"repo_name": "cloudera/director-aws-plugin",
"path": "provider/src/main/java/com/cloudera/director/aws/ec2/ebs/EBSAllocator.java",
"license": "apache-2.0",
"size": 31207
} | [
"com.amazonaws.services.ec2.model.VolumeState",
"java.util.Collections",
"java.util.Set"
] | import com.amazonaws.services.ec2.model.VolumeState; import java.util.Collections; import java.util.Set; | import com.amazonaws.services.ec2.model.*; import java.util.*; | [
"com.amazonaws.services",
"java.util"
] | com.amazonaws.services; java.util; | 716,462 |
public static String getLinkText(String before,String after) {
// 0 e.g. [[:Image:Wiktionary-logo-gl.png|a logo that depicts a dictionary]]
if( 0 == before.length()
|| existsCode(before) )
{
if(-1 != after.indexOf('|')) {
return StringUtil.getTextAfterFirstVerticalPipe(after);
} else if(-1 != after.indexOf(':')) {
// e.g. [[:de:Hauptseite]], after = "de:Hauptseite"
String lang_code = StringUtil.getTextBeforeFirstColumn(after);
if(LanguageType.has(lang_code)) {
return StringUtil.getTextAfterFirstColumn(after);
}
return after;
}
}
return new StringBuffer(before).append(":").append(after).toString();
} | static String function(String before,String after) { if( 0 == before.length() existsCode(before) ) { if(-1 != after.indexOf(' ')) { return StringUtil.getTextAfterFirstVerticalPipe(after); } else if(-1 != after.indexOf(':')) { String lang_code = StringUtil.getTextBeforeFirstColumn(after); if(LanguageType.has(lang_code)) { return StringUtil.getTextAfterFirstColumn(after); } return after; } } return new StringBuffer(before).append(":").append(after).toString(); } | /** Gets texts of interwikimedia link. E.g.
* [[wikt:Wiktionary:Statistics#Detail|statistics]] -> "statistics"
*
* @param before the text before the first column, e.g. "wikipedia" in [[wikipedia:]]
* @param after e.g. "Wikipedia:Wikimedia_sister_projects" in [[w:Wikipedia:Wikimedia_sister_projects]]
*/ | Gets texts of interwikimedia link. E.g. [[wikt:Wiktionary:Statistics#Detail|statistics]] -> "statistics" | getLinkText | {
"repo_name": "dkpro/dkpro-jwktl",
"path": "src/main/java/de/tudarmstadt/ukp/jwktl/parser/ru/wikokit/base/wikipedia/language/WikimediaSisterProject.java",
"license": "apache-2.0",
"size": 7108
} | [
"de.tudarmstadt.ukp.jwktl.parser.ru.wikokit.base.wikipedia.util.StringUtil"
] | import de.tudarmstadt.ukp.jwktl.parser.ru.wikokit.base.wikipedia.util.StringUtil; | import de.tudarmstadt.ukp.jwktl.parser.ru.wikokit.base.wikipedia.util.*; | [
"de.tudarmstadt.ukp"
] | de.tudarmstadt.ukp; | 1,484,274 |
public void dialog(HttpServletRequest request, HttpServletResponse response
, BasePage basePage
, String language
) throws IOException {
if (true) { // try {
PrintWriter out = basePage.writeHeader(request, response, language);
out.write("<title>" + basePage.getAppName() + " Main Page</title>\n");
out.write("</head>\n<body>\n");
out.write("<h3>Common Version " + (new MetaInfPage()).getVersionString(this, "common") + "<h3>\n");
out.write("<p>This project collects a series of classes and "
+ "methods which are useful in several subprojects.</p>\n");
out.write("<h3>Parameter Test</h3>\n");
out.write("<form action=\"servlet\" method=\"POST\" enctype=\"multipart/form-data\">\n");
out.write("<input name=\"view\" type=\"hidden\" value=\"index\" />\n");
// first all normal fields
out.write("<h4>Form Fields</h4>\n");
String key = null;
int ifld = 0;
Iterator<String> fiter = basePage.getFormIterator();
while (fiter.hasNext()) {
key = fiter.next();
String value = basePage.getFormField(key);
out.write("[field" + String.valueOf(ifld) + "] " + key
+ ": <input name=\"" + key + "\" type=\"text\" size=\"16\" value=\""
+ value + "\" /><br />\n");
ifld ++;
if (value.equals("null")) {
throw new IOException("artificial null pointer exception");
}
} // while fiter
// then the uploaded files
out.write("<h4>Form Files</h4>\n");
int fileCount = basePage.getFormFileCount();
int
ifile = 0;
while (ifile < 2) {
if (ifile < fileCount) {
FileItem fitem = basePage.getFormFile(ifile);
File location = ((DiskFileItem) fitem).getStoreLocation();
// out.write("<!-- fitem is located in " + location.getAbsolutePath() + " -->\n");
key = fitem.getName();
} else {
key = "file" + String.valueOf(ifile);
}
out.write("[file" + String.valueOf(ifile) + "]<em> " + key
+ "</em>: <input name=\"" + key + "\" type=\"file\" size=\"16\""
+ "\" style=\"font-family: Courier, monospace\"/><br />\n");
ifile ++;
} // while ifile
out.write(" <input type=\"submit\" value=\"Submit\"><br />\n");
out.write("</form>\n");
ifile = 0;
while (ifile < fileCount) {
FileItem fitem = basePage.getFormFile(ifile);
key = fitem.getName();
out.write("<h4>Content of File " + String.valueOf(ifile) + ":<em>" + key +"</em></h4>\n");
out.write(fitem.getString() + "\n</pre>\n");
ifile ++;
} // while ifile
out.write(basePage.getOtherAuxiliaryLinks(language, "main"));
basePage.writeTrailer(language, "quest");
}
} // dialog
//================
// Main method
//================ | void function(HttpServletRequest request, HttpServletResponse response , BasePage basePage , String language ) throws IOException { if (true) { PrintWriter out = basePage.writeHeader(request, response, language); out.write(STR + basePage.getAppName() + STR); out.write(STR); out.write(STR + (new MetaInfPage()).getVersionString(this, STR) + STR); out.write(STR + STR); out.write(STR); out.write(STRservlet\STRPOST\STRmultipart/form-data\">\n"); out.write(STRview\STRhidden\STRindex\STR); out.write(STR); String key = null; int ifld = 0; Iterator<String> fiter = basePage.getFormIterator(); while (fiter.hasNext()) { key = fiter.next(); String value = basePage.getFormField(key); out.write(STR + String.valueOf(ifld) + STR + key + STRSTR\STRtext\STR16\STRSTR\STR); ifld ++; if (value.equals("null")) { throw new IOException(STR); } } out.write(STR); int fileCount = basePage.getFormFileCount(); int ifile = 0; while (ifile < 2) { if (ifile < fileCount) { FileItem fitem = basePage.getFormFile(ifile); File location = ((DiskFileItem) fitem).getStoreLocation(); key = fitem.getName(); } else { key = "file" + String.valueOf(ifile); } out.write("[file" + String.valueOf(ifile) + "]<em> STR</em>: <input name=\"STR\STRfile\STR16\STR\STRfont-family: Courier, monospace\STR); ifile ++; } out.write(STRsubmit\STRSubmit\STR); out.write(STR); ifile = 0; while (ifile < fileCount) { FileItem fitem = basePage.getFormFile(ifile); key = fitem.getName(); out.write(STR + String.valueOf(ifile) + ":<em>" + key +STR); out.write(fitem.getString() + STR); ifile ++; } out.write(basePage.getOtherAuxiliaryLinks(language, "main")); basePage.writeTrailer(language, "quest"); } } | /** Output the main dialog page for RaMath
* @param request request with header fields
* @param response response with writer
* @param basePage refrence to common methods and error messages
* @param language 2-letter code en, de etc.
* @throws IOException if an IO error occurs
*/ | Output the main dialog page for RaMath | dialog | {
"repo_name": "gfis/common",
"path": "src/main/java/org/teherba/common/priv/IndexPage.java",
"license": "apache-2.0",
"size": 5862
} | [
"java.io.File",
"java.io.IOException",
"java.io.PrintWriter",
"java.util.Iterator",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"org.apache.commons.fileupload.FileItem",
"org.apache.commons.fileupload.disk.DiskFileItem",
"org.teherba.common.web.BasePage",
"org.teherba.common.web.MetaInfPage"
] | import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.Iterator; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.disk.DiskFileItem; import org.teherba.common.web.BasePage; import org.teherba.common.web.MetaInfPage; | import java.io.*; import java.util.*; import javax.servlet.http.*; import org.apache.commons.fileupload.*; import org.apache.commons.fileupload.disk.*; import org.teherba.common.web.*; | [
"java.io",
"java.util",
"javax.servlet",
"org.apache.commons",
"org.teherba.common"
] | java.io; java.util; javax.servlet; org.apache.commons; org.teherba.common; | 2,893,663 |
@Test
public void testQueriesOnReplicatedRegionWithNullProjAttr() {
Cache cache = CacheUtils.getCache();
createLocalRegionWithNullValues();
assertNotNull(cache.getRegion(regionName));
assertEquals(numElem * 2, cache.getRegion(regionName).size());
QueryService queryService = cache.getQueryService();
Query query1 = null;
try {
for (String queryStr : moreQueries) {
query1 = queryService.newQuery(queryStr);
SelectResults result1 = (SelectResults) query1.execute();
cache.getLogger().fine(result1.asList().toString());
assertEquals(queryStr, numElem, result1.size());
verifyDistinctResults(result1);
}
} catch (Exception e) {
e.printStackTrace();
fail("Query " + query1 + " Execution Failed!");
}
// Destroy current Region for other tests
cache.getRegion(regionName).destroyRegion();
} | void function() { Cache cache = CacheUtils.getCache(); createLocalRegionWithNullValues(); assertNotNull(cache.getRegion(regionName)); assertEquals(numElem * 2, cache.getRegion(regionName).size()); QueryService queryService = cache.getQueryService(); Query query1 = null; try { for (String queryStr : moreQueries) { query1 = queryService.newQuery(queryStr); SelectResults result1 = (SelectResults) query1.execute(); cache.getLogger().fine(result1.asList().toString()); assertEquals(queryStr, numElem, result1.size()); verifyDistinctResults(result1); } } catch (Exception e) { e.printStackTrace(); fail(STR + query1 + STR); } cache.getRegion(regionName).destroyRegion(); } | /**
* Test on Replicated Region data
*/ | Test on Replicated Region data | testQueriesOnReplicatedRegionWithNullProjAttr | {
"repo_name": "ysung-pivotal/incubator-geode",
"path": "gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/DistinctResultsWithDupValuesInRegionJUnitTest.java",
"license": "apache-2.0",
"size": 15389
} | [
"com.gemstone.gemfire.cache.Cache",
"com.gemstone.gemfire.cache.query.CacheUtils",
"com.gemstone.gemfire.cache.query.Query",
"com.gemstone.gemfire.cache.query.QueryService",
"com.gemstone.gemfire.cache.query.SelectResults",
"org.junit.Assert"
] | import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.query.CacheUtils; import com.gemstone.gemfire.cache.query.Query; import com.gemstone.gemfire.cache.query.QueryService; import com.gemstone.gemfire.cache.query.SelectResults; import org.junit.Assert; | import com.gemstone.gemfire.cache.*; import com.gemstone.gemfire.cache.query.*; import org.junit.*; | [
"com.gemstone.gemfire",
"org.junit"
] | com.gemstone.gemfire; org.junit; | 9,151 |
public static void main(final String[] args) {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (Exception e) {
e.printStackTrace();
}
if (args != null && args.length > 0) {
// a default url is given
new CaptureClientGui(args[0]);
} else {
new CaptureClientGui();
}
}
| static void function(final String[] args) { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { e.printStackTrace(); } if (args != null && args.length > 0) { new CaptureClientGui(args[0]); } else { new CaptureClientGui(); } } | /**
* Instantiates a new CaptureClientGui using a look-and-feel that matches
* the operating system.
*
* @param args
* The address to which the CaptureClient should send the capture
* events. If omitted, a default address will be provided.
*/ | Instantiates a new CaptureClientGui using a look-and-feel that matches the operating system | main | {
"repo_name": "tavlima/fosstrak-epcis",
"path": "epcis-captureclient/src/main/java/org/fosstrak/epcis/captureclient/CaptureClientGui.java",
"license": "lgpl-2.1",
"size": 40962
} | [
"javax.swing.UIManager"
] | import javax.swing.UIManager; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 1,423,877 |
private String getCurrentClub(String data){
try{
data = data.replace("/", "_");
Log.d(data, data);
return new HTTPAsyncTask().execute(serverAddress + "/clubs/" + data, "GET").get();
}
catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
return null;
} | String function(String data){ try{ data = data.replace("/", "_"); Log.d(data, data); return new HTTPAsyncTask().execute(serverAddress + STR + data, "GET").get(); } catch (InterruptedException ExecutionException e) { e.printStackTrace(); } return null; } | /**
* Used by DisplayClub and PostingActivity to fetch the club selected by the user in the previous view.
* @param data to get
* @return current club
*/ | Used by DisplayClub and PostingActivity to fetch the club selected by the user in the previous view | getCurrentClub | {
"repo_name": "umdcs/umdalive",
"path": "app/src/main/java/com/example/kevin/umdalive/Models/RestModel.java",
"license": "gpl-3.0",
"size": 8506
} | [
"android.util.Log",
"java.util.concurrent.ExecutionException"
] | import android.util.Log; import java.util.concurrent.ExecutionException; | import android.util.*; import java.util.concurrent.*; | [
"android.util",
"java.util"
] | android.util; java.util; | 234,182 |
public ReplicaState getCurrentReplicaState() {
return currentReplicaState;
}
} | ReplicaState function() { return currentReplicaState; } } | /**
* Get the state of the current replica.
* The state corresponds to the replica returned
* by the latest {@link #next()}.
*/ | Get the state of the current replica. The state corresponds to the replica returned by the latest <code>#next()</code> | getCurrentReplicaState | {
"repo_name": "gabrielborgesmagalhaes/hadoop-hdfs",
"path": "src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java",
"license": "apache-2.0",
"size": 9173
} | [
"org.apache.hadoop.hdfs.server.common.HdfsConstants"
] | import org.apache.hadoop.hdfs.server.common.HdfsConstants; | import org.apache.hadoop.hdfs.server.common.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,130,839 |
public static boolean cantStandAtWater(Block block)
{
Block otherBlock = block.getRelative(BlockFace.DOWN);
boolean isHover = block.getType() == Material.AIR;
boolean n = otherBlock.getRelative(BlockFace.NORTH).getType() == Material.WATER;
boolean s = otherBlock.getRelative(BlockFace.SOUTH).getType() == Material.WATER;
boolean e = otherBlock.getRelative(BlockFace.EAST).getType() == Material.WATER;
boolean w = otherBlock.getRelative(BlockFace.WEST).getType() == Material.WATER;
boolean ne = otherBlock.getRelative(BlockFace.NORTH_EAST).getType() == Material.WATER;
boolean nw = otherBlock.getRelative(BlockFace.NORTH_WEST).getType() == Material.WATER;
boolean se = otherBlock.getRelative(BlockFace.SOUTH_EAST).getType() == Material.WATER;
boolean sw = otherBlock.getRelative(BlockFace.SOUTH_WEST).getType() == Material.WATER;
return(n && s && e && w && ne && nw && se && sw && isHover);
} | static boolean function(Block block) { Block otherBlock = block.getRelative(BlockFace.DOWN); boolean isHover = block.getType() == Material.AIR; boolean n = otherBlock.getRelative(BlockFace.NORTH).getType() == Material.WATER; boolean s = otherBlock.getRelative(BlockFace.SOUTH).getType() == Material.WATER; boolean e = otherBlock.getRelative(BlockFace.EAST).getType() == Material.WATER; boolean w = otherBlock.getRelative(BlockFace.WEST).getType() == Material.WATER; boolean ne = otherBlock.getRelative(BlockFace.NORTH_EAST).getType() == Material.WATER; boolean nw = otherBlock.getRelative(BlockFace.NORTH_WEST).getType() == Material.WATER; boolean se = otherBlock.getRelative(BlockFace.SOUTH_EAST).getType() == Material.WATER; boolean sw = otherBlock.getRelative(BlockFace.SOUTH_WEST).getType() == Material.WATER; return(n && s && e && w && ne && nw && se && sw && isHover); } | /**
* Eh, I got lazy; sue me.
* TODO: Improve
* @param block
* @return
*/ | Eh, I got lazy; sue me | cantStandAtWater | {
"repo_name": "m1enkrafftman/AntiCheatPlus",
"path": "src/main/java/net/dynamicdev/anticheat/util/Utilities.java",
"license": "gpl-3.0",
"size": 26825
} | [
"org.bukkit.Material",
"org.bukkit.block.Block",
"org.bukkit.block.BlockFace"
] | import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; | import org.bukkit.*; import org.bukkit.block.*; | [
"org.bukkit",
"org.bukkit.block"
] | org.bukkit; org.bukkit.block; | 935,468 |
static QueryBuilder parseAliasFilter(Function<XContentParser, QueryParseContext> contextFactory,
IndexMetaData metaData, String... aliasNames) {
if (aliasNames == null || aliasNames.length == 0) {
return null;
}
Index index = metaData.getIndex();
ImmutableOpenMap<String, AliasMetaData> aliases = metaData.getAliases();
Function<AliasMetaData, QueryBuilder> parserFunction = (alias) -> {
if (alias.filter() == null) {
return null;
}
try {
byte[] filterSource = alias.filter().uncompressed();
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
return contextFactory.apply(parser).parseInnerQueryBuilder();
}
} catch (IOException ex) {
throw new AliasFilterParsingException(index, alias.getAlias(), "Invalid alias filter", ex);
}
};
if (aliasNames.length == 1) {
AliasMetaData alias = aliases.get(aliasNames[0]);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter");
}
return parserFunction.apply(alias);
} else {
// we need to bench here a bit, to see maybe it makes sense to use OrFilter
BoolQueryBuilder combined = new BoolQueryBuilder();
for (String aliasName : aliasNames) {
AliasMetaData alias = aliases.get(aliasName);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(index, aliasNames[0],
"Unknown alias name was passed to alias Filter");
}
QueryBuilder parsedFilter = parserFunction.apply(alias);
if (parsedFilter != null) {
combined.should(parsedFilter);
} else {
// The filter might be null only if filter was removed after filteringAliases was called
return null;
}
}
return combined;
}
} | static QueryBuilder parseAliasFilter(Function<XContentParser, QueryParseContext> contextFactory, IndexMetaData metaData, String... aliasNames) { if (aliasNames == null aliasNames.length == 0) { return null; } Index index = metaData.getIndex(); ImmutableOpenMap<String, AliasMetaData> aliases = metaData.getAliases(); Function<AliasMetaData, QueryBuilder> parserFunction = (alias) -> { if (alias.filter() == null) { return null; } try { byte[] filterSource = alias.filter().uncompressed(); try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) { return contextFactory.apply(parser).parseInnerQueryBuilder(); } } catch (IOException ex) { throw new AliasFilterParsingException(index, alias.getAlias(), STR, ex); } }; if (aliasNames.length == 1) { AliasMetaData alias = aliases.get(aliasNames[0]); if (alias == null) { throw new InvalidAliasNameException(index, aliasNames[0], STR); } return parserFunction.apply(alias); } else { BoolQueryBuilder combined = new BoolQueryBuilder(); for (String aliasName : aliasNames) { AliasMetaData alias = aliases.get(aliasName); if (alias == null) { throw new InvalidAliasNameException(index, aliasNames[0], STR); } QueryBuilder parsedFilter = parserFunction.apply(alias); if (parsedFilter != null) { combined.should(parsedFilter); } else { return null; } } return combined; } } | /**
* Returns the filter associated with listed filtering aliases.
* <p>
* The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
* Returns <tt>null</tt> if no filtering is required.</p>
*/ | Returns the filter associated with listed filtering aliases. The list of filtering aliases should be obtained by calling MetaData.filteringAliases. Returns null if no filtering is required | parseAliasFilter | {
"repo_name": "MaineC/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java",
"license": "apache-2.0",
"size": 5712
} | [
"java.io.IOException",
"java.util.function.Function",
"org.elasticsearch.cluster.metadata.AliasMetaData",
"org.elasticsearch.cluster.metadata.IndexMetaData",
"org.elasticsearch.common.collect.ImmutableOpenMap",
"org.elasticsearch.common.xcontent.XContentFactory",
"org.elasticsearch.common.xcontent.XContentParser",
"org.elasticsearch.index.Index",
"org.elasticsearch.index.query.BoolQueryBuilder",
"org.elasticsearch.index.query.QueryBuilder",
"org.elasticsearch.index.query.QueryParseContext",
"org.elasticsearch.indices.AliasFilterParsingException",
"org.elasticsearch.indices.InvalidAliasNameException"
] | import java.io.IOException; import java.util.function.Function; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.AliasFilterParsingException; import org.elasticsearch.indices.InvalidAliasNameException; | import java.io.*; import java.util.function.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.common.collect.*; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.*; import org.elasticsearch.index.query.*; import org.elasticsearch.indices.*; | [
"java.io",
"java.util",
"org.elasticsearch.cluster",
"org.elasticsearch.common",
"org.elasticsearch.index",
"org.elasticsearch.indices"
] | java.io; java.util; org.elasticsearch.cluster; org.elasticsearch.common; org.elasticsearch.index; org.elasticsearch.indices; | 249,285 |
private void readCentralDirectoryFileHeaders(long fileOffset) throws IOException {
CountingInputStream centralDirectory = new CountingInputStream(getStreamAt(fileOffset));
while (centralDirectory.getCount() < zipData.getCentralDirectorySize()) {
ZipFileEntry entry = CentralDirectoryFileHeader.read(centralDirectory, zipData.getCharset());
zipData.addEntry(entry);
}
} | void function(long fileOffset) throws IOException { CountingInputStream centralDirectory = new CountingInputStream(getStreamAt(fileOffset)); while (centralDirectory.getCount() < zipData.getCentralDirectorySize()) { ZipFileEntry entry = CentralDirectoryFileHeader.read(centralDirectory, zipData.getCharset()); zipData.addEntry(entry); } } | /**
* Reads and parses ZIP file entries from the central directory.
*
* @param fileOffset the file offset of the start of the central directory
* @throws ZipException if a ZIP format error has occurred
* @throws IOException if an I/O error has occurred
*/ | Reads and parses ZIP file entries from the central directory | readCentralDirectoryFileHeaders | {
"repo_name": "hhclam/bazel",
"path": "src/java_tools/singlejar/java/com/google/devtools/build/zip/ZipReader.java",
"license": "apache-2.0",
"size": 18677
} | [
"com.google.devtools.build.zip.ZipUtil",
"java.io.IOException"
] | import com.google.devtools.build.zip.ZipUtil; import java.io.IOException; | import com.google.devtools.build.zip.*; import java.io.*; | [
"com.google.devtools",
"java.io"
] | com.google.devtools; java.io; | 2,520,350 |
private void handleLocationChange(SensorPipelineEvent event) {
try {
JSONObject data = event.getEventDetails().getJSONObject("data");
double latitude = data.getDouble("latitude");
double longitude = data.getDouble("longitude");
setCurrentLocation(new LatLng(latitude, longitude));
} catch (JSONException e) {
e.printStackTrace();
}
} | void function(SensorPipelineEvent event) { try { JSONObject data = event.getEventDetails().getJSONObject("data"); double latitude = data.getDouble(STR); double longitude = data.getDouble(STR); setCurrentLocation(new LatLng(latitude, longitude)); } catch (JSONException e) { e.printStackTrace(); } } | /**
* Extract the coordinates from the context event and update the current location pin on the map
* @param event the sensor pipeline event
*/ | Extract the coordinates from the context event and update the current location pin on the map | handleLocationChange | {
"repo_name": "contexthub/boundaries-android",
"path": "BoundariesApp/app/src/main/java/com/contexthub/boundaries/fragments/GeofencesMapFragment.java",
"license": "mit",
"size": 8680
} | [
"com.chaione.contexthub.sdk.SensorPipelineEvent",
"com.google.android.gms.maps.model.LatLng",
"org.json.JSONException",
"org.json.JSONObject"
] | import com.chaione.contexthub.sdk.SensorPipelineEvent; import com.google.android.gms.maps.model.LatLng; import org.json.JSONException; import org.json.JSONObject; | import com.chaione.contexthub.sdk.*; import com.google.android.gms.maps.model.*; import org.json.*; | [
"com.chaione.contexthub",
"com.google.android",
"org.json"
] | com.chaione.contexthub; com.google.android; org.json; | 2,742,689 |
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(STATE_KEY_NUM_ATTACHMENTS_LOADING, mNumAttachmentsLoading);
outState.putString(STATE_KEY_WAITING_FOR_ATTACHMENTS, mWaitingForAttachments.name());
outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, createAttachmentList());
outState.putBoolean(STATE_KEY_CC_SHOWN, mCcWrapper.getVisibility() == View.VISIBLE);
outState.putBoolean(STATE_KEY_BCC_SHOWN, mBccWrapper.getVisibility() == View.VISIBLE);
outState.putSerializable(STATE_KEY_QUOTED_TEXT_MODE, mQuotedTextMode);
outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed);
outState.putLong(STATE_KEY_DRAFT_ID, mDraftId);
outState.putSerializable(STATE_IDENTITY, mIdentity);
outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged);
outState.putSerializable(STATE_PGP_DATA, mPgpData);
outState.putString(STATE_IN_REPLY_TO, mInReplyTo);
outState.putString(STATE_REFERENCES, mReferences);
outState.putSerializable(STATE_KEY_HTML_QUOTE, mQuotedHtmlContent);
outState.putBoolean(STATE_KEY_READ_RECEIPT, mReadReceipt);
outState.putBoolean(STATE_KEY_DRAFT_NEEDS_SAVING, mDraftNeedsSaving);
outState.putBoolean(STATE_KEY_FORCE_PLAIN_TEXT, mForcePlainText);
outState.putSerializable(STATE_KEY_QUOTED_TEXT_FORMAT, mQuotedTextFormat);
} | void function(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(STATE_KEY_NUM_ATTACHMENTS_LOADING, mNumAttachmentsLoading); outState.putString(STATE_KEY_WAITING_FOR_ATTACHMENTS, mWaitingForAttachments.name()); outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, createAttachmentList()); outState.putBoolean(STATE_KEY_CC_SHOWN, mCcWrapper.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_BCC_SHOWN, mBccWrapper.getVisibility() == View.VISIBLE); outState.putSerializable(STATE_KEY_QUOTED_TEXT_MODE, mQuotedTextMode); outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed); outState.putLong(STATE_KEY_DRAFT_ID, mDraftId); outState.putSerializable(STATE_IDENTITY, mIdentity); outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged); outState.putSerializable(STATE_PGP_DATA, mPgpData); outState.putString(STATE_IN_REPLY_TO, mInReplyTo); outState.putString(STATE_REFERENCES, mReferences); outState.putSerializable(STATE_KEY_HTML_QUOTE, mQuotedHtmlContent); outState.putBoolean(STATE_KEY_READ_RECEIPT, mReadReceipt); outState.putBoolean(STATE_KEY_DRAFT_NEEDS_SAVING, mDraftNeedsSaving); outState.putBoolean(STATE_KEY_FORCE_PLAIN_TEXT, mForcePlainText); outState.putSerializable(STATE_KEY_QUOTED_TEXT_FORMAT, mQuotedTextFormat); } | /**
* The framework handles most of the fields, but we need to handle stuff that we
* dynamically show and hide:
* Attachment list,
* Cc field,
* Bcc field,
* Quoted text,
*/ | The framework handles most of the fields, but we need to handle stuff that we dynamically show and hide: Attachment list, Cc field, Bcc field, Quoted text | onSaveInstanceState | {
"repo_name": "cooperpellaton/k-9",
"path": "k9mail/src/main/java/com/fsck/k9/activity/MessageCompose.java",
"license": "bsd-3-clause",
"size": 146279
} | [
"android.os.Bundle",
"android.view.View"
] | import android.os.Bundle; import android.view.View; | import android.os.*; import android.view.*; | [
"android.os",
"android.view"
] | android.os; android.view; | 194,468 |
public static byte[] compress(byte[] value, int offset, int length, int compressionLevel) {
ByteArrayOutputStream bos = new ByteArrayOutputStream(length);
Deflater compressor = new Deflater();
try {
compressor.setLevel(compressionLevel);
compressor.setInput(value, offset, length);
compressor.finish();
// Compress the data
final byte[] buf = new byte[1024];
while (!compressor.finished()) {
int count = compressor.deflate(buf);
bos.write(buf, 0, count);
}
} finally {
compressor.end();
}
return bos.toByteArray();
} | static byte[] function(byte[] value, int offset, int length, int compressionLevel) { ByteArrayOutputStream bos = new ByteArrayOutputStream(length); Deflater compressor = new Deflater(); try { compressor.setLevel(compressionLevel); compressor.setInput(value, offset, length); compressor.finish(); final byte[] buf = new byte[1024]; while (!compressor.finished()) { int count = compressor.deflate(buf); bos.write(buf, 0, count); } } finally { compressor.end(); } return bos.toByteArray(); } | /** Compresses the specified byte range using the
* specified compressionLevel (constants are defined in
* java.util.zip.Deflater). */ | Compresses the specified byte range using the specified compressionLevel (constants are defined in | compress | {
"repo_name": "chrishumphreys/provocateur",
"path": "provocateur-thirdparty/src/main/java/org/targettest/org/apache/lucene/document/CompressionTools.java",
"license": "apache-2.0",
"size": 4521
} | [
"java.io.ByteArrayOutputStream",
"java.util.zip.Deflater"
] | import java.io.ByteArrayOutputStream; import java.util.zip.Deflater; | import java.io.*; import java.util.zip.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,187,594 |
protected TokenizerFactory tokenizerFactory(String name, String... keysAndValues) throws Exception {
return tokenizerFactory(name, Version.LATEST, keysAndValues);
}
| TokenizerFactory function(String name, String... keysAndValues) throws Exception { return tokenizerFactory(name, Version.LATEST, keysAndValues); } | /**
* Returns a fully initialized TokenizerFactory with the specified name and key-value arguments.
* {@link ClasspathResourceLoader} is used for loading resources, so any required ones should
* be on the test classpath.
*/ | Returns a fully initialized TokenizerFactory with the specified name and key-value arguments. <code>ClasspathResourceLoader</code> is used for loading resources, so any required ones should be on the test classpath | tokenizerFactory | {
"repo_name": "ioantiba/solr-data",
"path": "src/test/java/net/global/software/analysis/BaseTokenStreamFactoryTestCase.java",
"license": "gpl-3.0",
"size": 6705
} | [
"org.apache.lucene.analysis.util.TokenizerFactory",
"org.apache.lucene.util.Version"
] | import org.apache.lucene.analysis.util.TokenizerFactory; import org.apache.lucene.util.Version; | import org.apache.lucene.analysis.util.*; import org.apache.lucene.util.*; | [
"org.apache.lucene"
] | org.apache.lucene; | 1,857,690 |
//-----------------------------------------------------------------------
public MetaProperty<ImmutableMap<ExplainKey<?>, Object>> map() {
return map;
} | MetaProperty<ImmutableMap<ExplainKey<?>, Object>> function() { return map; } | /**
* The meta-property for the {@code map} property.
* @return the meta-property, not null
*/ | The meta-property for the map property | map | {
"repo_name": "OpenGamma/Strata",
"path": "modules/market/src/main/java/com/opengamma/strata/market/explain/ExplainMap.java",
"license": "apache-2.0",
"size": 11030
} | [
"com.google.common.collect.ImmutableMap",
"org.joda.beans.MetaProperty"
] | import com.google.common.collect.ImmutableMap; import org.joda.beans.MetaProperty; | import com.google.common.collect.*; import org.joda.beans.*; | [
"com.google.common",
"org.joda.beans"
] | com.google.common; org.joda.beans; | 2,130,061 |
public boolean getFeature(String featureId)
throws SAXNotRecognizedException, SAXNotSupportedException {
try {
// http://xml.org/sax/features/use-entity-resolver2
// controls whether the methods of an object implementing
// org.xml.sax.ext.EntityResolver2 will be used by the parser.
//
if (featureId.equals(USE_ENTITY_RESOLVER2)) {
return fUseEntityResolver2;
}
//
// Default handling
//
return fConfiguration.getFeature(featureId);
}
catch (XMLConfigurationException e) {
String identifier = e.getIdentifier();
if (e.getType() == Status.NOT_RECOGNIZED) {
throw new SAXNotRecognizedException(
SAXMessageFormatter.formatMessage(fConfiguration.getLocale(),
"feature-not-recognized", new Object [] {identifier}));
}
else {
throw new SAXNotSupportedException(
SAXMessageFormatter.formatMessage(fConfiguration.getLocale(),
"feature-not-supported", new Object [] {identifier}));
}
}
} // getFeature(String):boolean | boolean function(String featureId) throws SAXNotRecognizedException, SAXNotSupportedException { try { return fUseEntityResolver2; } return fConfiguration.getFeature(featureId); } catch (XMLConfigurationException e) { String identifier = e.getIdentifier(); if (e.getType() == Status.NOT_RECOGNIZED) { throw new SAXNotRecognizedException( SAXMessageFormatter.formatMessage(fConfiguration.getLocale(), STR, new Object [] {identifier})); } else { throw new SAXNotSupportedException( SAXMessageFormatter.formatMessage(fConfiguration.getLocale(), STR, new Object [] {identifier})); } } } | /**
* Query the state of a feature.
*
* Query the current state of any feature in a SAX2 parser. The
* parser might not recognize the feature.
*
* @param featureId The unique identifier (URI) of the feature
* being set.
* @return The current state of the feature.
* @exception org.xml.sax.SAXNotRecognizedException If the
* requested feature is not known.
* @exception SAXNotSupportedException If the
* requested feature is known but not supported.
*/ | Query the state of a feature. Query the current state of any feature in a SAX2 parser. The parser might not recognize the feature | getFeature | {
"repo_name": "FauxFaux/jdk9-jaxp",
"path": "src/java.xml/share/classes/com/sun/org/apache/xerces/internal/parsers/DOMParser.java",
"license": "gpl-2.0",
"size": 24650
} | [
"com.sun.org.apache.xerces.internal.util.SAXMessageFormatter",
"com.sun.org.apache.xerces.internal.util.Status",
"com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException",
"org.xml.sax.SAXNotRecognizedException",
"org.xml.sax.SAXNotSupportedException"
] | import com.sun.org.apache.xerces.internal.util.SAXMessageFormatter; import com.sun.org.apache.xerces.internal.util.Status; import com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; | import com.sun.org.apache.xerces.internal.util.*; import com.sun.org.apache.xerces.internal.xni.parser.*; import org.xml.sax.*; | [
"com.sun.org",
"org.xml.sax"
] | com.sun.org; org.xml.sax; | 488,266 |
@Override
public Dimension minimumLayoutSize(Container parent) {
synchronized (parent.getTreeLock()) {
Insets insets = parent.getInsets();
int ncomponents = parent.getComponentCount();
int w = 0;
int h = 0;
boolean anyVisible = false;
for (int i = 0; i < ncomponents; i++) {
Component comp = parent.getComponent(i);
anyVisible |= comp.isVisible();
if (comp.isVisible() || sizeHidden) {
Dimension d = comp.getMinimumSize();
if (d.width > w) {
w = d.width;
}
if (d.height > h) {
h = d.height;
}
}
}
if (!anyVisible) {
return new Dimension(0, 0);
}
return new Dimension(insets.left + insets.right + w + hgap * 2,
insets.top + insets.bottom + h + vgap * 2);
}
}
| Dimension function(Container parent) { synchronized (parent.getTreeLock()) { Insets insets = parent.getInsets(); int ncomponents = parent.getComponentCount(); int w = 0; int h = 0; boolean anyVisible = false; for (int i = 0; i < ncomponents; i++) { Component comp = parent.getComponent(i); anyVisible = comp.isVisible(); if (comp.isVisible() sizeHidden) { Dimension d = comp.getMinimumSize(); if (d.width > w) { w = d.width; } if (d.height > h) { h = d.height; } } } if (!anyVisible) { return new Dimension(0, 0); } return new Dimension(insets.left + insets.right + w + hgap * 2, insets.top + insets.bottom + h + vgap * 2); } } | /**
* Calculates the minimum size for the specified panel.
*
* @param parent
* the parent container in which to do the layout
* @return the minimum dimensions required to lay out the subcomponents of
* the specified container
* @see java.awt.Container#doLayout
* @see java.awt.BetterCardLayout#preferredLayoutSize
*/ | Calculates the minimum size for the specified panel | minimumLayoutSize | {
"repo_name": "jedwards1211/breakout",
"path": "andork-ui/src/org/andork/awt/layout/BetterCardLayout.java",
"license": "gpl-2.0",
"size": 17253
} | [
"java.awt.Component",
"java.awt.Container",
"java.awt.Dimension",
"java.awt.Insets"
] | import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Insets; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,705,896 |
public Position merge(final Position other) {
if (other == null) {
return this;
} else {
for (final Entry<String, ConcurrentHashMap<Integer, Long>> entry : other.position.entrySet()) {
final String topic = entry.getKey();
final Map<Integer, Long> partitionMap =
position.computeIfAbsent(topic, k -> new ConcurrentHashMap<>());
for (final Entry<Integer, Long> partitionOffset : entry.getValue().entrySet()) {
final Integer partition = partitionOffset.getKey();
final Long offset = partitionOffset.getValue();
if (!partitionMap.containsKey(partition)
|| partitionMap.get(partition) < offset) {
partitionMap.put(partition, offset);
}
}
}
return this;
}
} | Position function(final Position other) { if (other == null) { return this; } else { for (final Entry<String, ConcurrentHashMap<Integer, Long>> entry : other.position.entrySet()) { final String topic = entry.getKey(); final Map<Integer, Long> partitionMap = position.computeIfAbsent(topic, k -> new ConcurrentHashMap<>()); for (final Entry<Integer, Long> partitionOffset : entry.getValue().entrySet()) { final Integer partition = partitionOffset.getKey(); final Long offset = partitionOffset.getValue(); if (!partitionMap.containsKey(partition) partitionMap.get(partition) < offset) { partitionMap.put(partition, offset); } } } return this; } } | /**
* Merges the provided Position into the current instance.
* <p>
* If both Positions contain the same topic -> partition -> offset mapping, the resulting
* Position will contain a mapping with the larger of the two offsets.
*/ | Merges the provided Position into the current instance. If both Positions contain the same topic -> partition -> offset mapping, the resulting Position will contain a mapping with the larger of the two offsets | merge | {
"repo_name": "TiVo/kafka",
"path": "streams/src/main/java/org/apache/kafka/streams/query/Position.java",
"license": "apache-2.0",
"size": 6825
} | [
"java.util.Map",
"java.util.concurrent.ConcurrentHashMap"
] | import java.util.Map; import java.util.concurrent.ConcurrentHashMap; | import java.util.*; import java.util.concurrent.*; | [
"java.util"
] | java.util; | 219,543 |
List<String> divideLine(DivisionData divisionData);
| List<String> divideLine(DivisionData divisionData); | /**
* Divide a LineString
*
* @param divisionData
* @return
*/ | Divide a LineString | divideLine | {
"repo_name": "geowe/geowe-geometry-service",
"path": "src/main/java/org/geowe/service/geometry/engine/GeoEngineer.java",
"license": "apache-2.0",
"size": 5970
} | [
"java.util.List",
"org.geowe.service.model.DivisionData"
] | import java.util.List; import org.geowe.service.model.DivisionData; | import java.util.*; import org.geowe.service.model.*; | [
"java.util",
"org.geowe.service"
] | java.util; org.geowe.service; | 206,813 |
public void setErrorView(ViewGroup errorView) {
this.mErrorView = errorView;
} | void function(ViewGroup errorView) { this.mErrorView = errorView; } | /**
* Sets error layout
*
* @param errorView the layout to be shown when list could not be loaded due to some error
*/ | Sets error layout | setErrorView | {
"repo_name": "a642500/Ybook",
"path": "app/src/main/java/com/ybook/app/EmptyLayout.java",
"license": "apache-2.0",
"size": 21757
} | [
"android.view.ViewGroup"
] | import android.view.ViewGroup; | import android.view.*; | [
"android.view"
] | android.view; | 1,393,336 |
void setUpdated(Date updated);
| void setUpdated(Date updated); | /**
* Set the client's updated date / time.
*
* @param updated new updated date.
*/ | Set the client's updated date / time | setUpdated | {
"repo_name": "groboclown/p4ic4idea",
"path": "p4java/src/main/java/com/perforce/p4java/client/IClientSummary.java",
"license": "apache-2.0",
"size": 10441
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 1,504,079 |
default List<T> list(
final ConnectorRequestContext context,
final QualifiedName name,
@Nullable final QualifiedName prefix,
@Nullable final Sort sort,
@Nullable final Pageable pageable
) {
throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
} | default List<T> list( final ConnectorRequestContext context, final QualifiedName name, @Nullable final QualifiedName prefix, @Nullable final Sort sort, @Nullable final Pageable pageable ) { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } | /**
* Get a list of all the resources under the given resource identified by <code>name</code>. Optionally sort by
* <code>sort</code> and add pagination via <code>pageable</code>.
*
* @param context The request context
* @param name The name of the resource under which to list resources of type <code>T</code>
* @param prefix The optional prefix to apply to filter resources for listing
* @param sort Optional sorting parameters
* @param pageable Optional paging parameters
* @return A list of type <code>T</code> resources in the desired order if required
* @throws UnsupportedOperationException If the connector doesn't implement this method
*/ | Get a list of all the resources under the given resource identified by <code>name</code>. Optionally sort by <code>sort</code> and add pagination via <code>pageable</code> | list | {
"repo_name": "zhljen/metacat",
"path": "metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/ConnectorBaseService.java",
"license": "apache-2.0",
"size": 6181
} | [
"com.netflix.metacat.common.QualifiedName",
"com.netflix.metacat.common.dto.Pageable",
"com.netflix.metacat.common.dto.Sort",
"java.util.List",
"javax.annotation.Nullable"
] | import com.netflix.metacat.common.QualifiedName; import com.netflix.metacat.common.dto.Pageable; import com.netflix.metacat.common.dto.Sort; import java.util.List; import javax.annotation.Nullable; | import com.netflix.metacat.common.*; import com.netflix.metacat.common.dto.*; import java.util.*; import javax.annotation.*; | [
"com.netflix.metacat",
"java.util",
"javax.annotation"
] | com.netflix.metacat; java.util; javax.annotation; | 1,452,958 |
@SneakyThrows(IOException.class)
public static String compressToBase64(final String string) {
Preconditions.checkNotNull(string, "string");
@Cleanup
final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@Cleanup
final OutputStream base64OutputStream = Base64.getEncoder().wrap(byteArrayOutputStream);
@Cleanup
final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(base64OutputStream);
gzipOutputStream.write(string.getBytes(UTF_8));
gzipOutputStream.close();
return byteArrayOutputStream.toString(UTF_8.name());
} | @SneakyThrows(IOException.class) static String function(final String string) { Preconditions.checkNotNull(string, STR); final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); final OutputStream base64OutputStream = Base64.getEncoder().wrap(byteArrayOutputStream); final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(base64OutputStream); gzipOutputStream.write(string.getBytes(UTF_8)); gzipOutputStream.close(); return byteArrayOutputStream.toString(UTF_8.name()); } | /**
* Convert the given string to its compressed base64 representation.
* @param string String to be compressed to base64.
* @return String Compressed Base64 representation of the input string.
* @throws NullPointerException If string is null.
*/ | Convert the given string to its compressed base64 representation | compressToBase64 | {
"repo_name": "pravega/pravega",
"path": "common/src/main/java/io/pravega/common/util/ToStringUtils.java",
"license": "apache-2.0",
"size": 7591
} | [
"com.google.common.base.Preconditions",
"java.io.ByteArrayOutputStream",
"java.io.IOException",
"java.io.OutputStream",
"java.util.Base64",
"java.util.zip.GZIPOutputStream"
] | import com.google.common.base.Preconditions; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Base64; import java.util.zip.GZIPOutputStream; | import com.google.common.base.*; import java.io.*; import java.util.*; import java.util.zip.*; | [
"com.google.common",
"java.io",
"java.util"
] | com.google.common; java.io; java.util; | 2,859,033 |
@Nonnull
public DriveCollectionRequest top(final int value) {
addTopOption(value);
return this;
} | DriveCollectionRequest function(final int value) { addTopOption(value); return this; } | /**
* Sets the top value for the request
*
* @param value the max number of items to return
* @return the updated request
*/ | Sets the top value for the request | top | {
"repo_name": "microsoftgraph/msgraph-sdk-java",
"path": "src/main/java/com/microsoft/graph/requests/DriveCollectionRequest.java",
"license": "mit",
"size": 5478
} | [
"com.microsoft.graph.requests.DriveCollectionRequest"
] | import com.microsoft.graph.requests.DriveCollectionRequest; | import com.microsoft.graph.requests.*; | [
"com.microsoft.graph"
] | com.microsoft.graph; | 1,695,683 |
private static void appendAsJava(
Comparable value,
StringBuilder sb,
SqlTypeName typeName,
RelDataType type,
boolean java,
RexDigestIncludeType includeType) {
switch (typeName) {
case CHAR:
NlsString nlsString = (NlsString) value;
if (java) {
Util.printJavaString(sb, nlsString.getValue(), true);
} else {
boolean includeCharset =
(nlsString.getCharsetName() != null)
&& !nlsString
.getCharsetName()
.equals(CalciteSystemProperty.DEFAULT_CHARSET.value());
sb.append(nlsString.asSql(includeCharset, false));
}
break;
case BOOLEAN:
assert value instanceof Boolean;
sb.append(value.toString());
break;
case DECIMAL:
assert value instanceof BigDecimal;
sb.append(value.toString());
break;
case DOUBLE:
assert value instanceof BigDecimal;
sb.append(Util.toScientificNotation((BigDecimal) value));
break;
case BIGINT:
assert value instanceof BigDecimal;
long narrowLong = ((BigDecimal) value).longValue();
sb.append(String.valueOf(narrowLong));
sb.append('L');
break;
case BINARY:
assert value instanceof ByteString;
sb.append("X'");
sb.append(((ByteString) value).toString(16));
sb.append("'");
break;
case NULL:
assert value == null;
sb.append("null");
break;
case SARG:
assert value instanceof Sarg;
//noinspection unchecked,rawtypes
Util.asStringBuilder(sb, sb2 -> printSarg(sb2, (Sarg) value, type));
break;
case SYMBOL:
assert value instanceof Enum;
sb.append("FLAG(");
sb.append(value.toString());
sb.append(")");
break;
case DATE:
assert value instanceof DateString;
sb.append(value.toString());
break;
case TIME:
case TIME_WITH_LOCAL_TIME_ZONE:
assert value instanceof TimeString;
sb.append(value.toString());
break;
case TIMESTAMP:
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
assert value instanceof TimestampString;
sb.append(value.toString());
break;
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
case INTERVAL_MONTH:
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
assert value instanceof BigDecimal;
sb.append(value.toString());
break;
case MULTISET:
case ROW:
final List<RexLiteral> list = (List) value;
Util.asStringBuilder(
sb,
sb2 ->
Util.printList(
sb,
list.size(),
(sb3, i) ->
sb3.append(
list.get(i).computeDigest(includeType))));
break;
case GEOMETRY:
final String wkt = GeoFunctions.ST_AsWKT((Geometries.Geom) value);
sb.append(wkt);
break;
default:
assert valueMatchesType(value, typeName, true);
throw Util.needToImplement(typeName);
}
} | static void function( Comparable value, StringBuilder sb, SqlTypeName typeName, RelDataType type, boolean java, RexDigestIncludeType includeType) { switch (typeName) { case CHAR: NlsString nlsString = (NlsString) value; if (java) { Util.printJavaString(sb, nlsString.getValue(), true); } else { boolean includeCharset = (nlsString.getCharsetName() != null) && !nlsString .getCharsetName() .equals(CalciteSystemProperty.DEFAULT_CHARSET.value()); sb.append(nlsString.asSql(includeCharset, false)); } break; case BOOLEAN: assert value instanceof Boolean; sb.append(value.toString()); break; case DECIMAL: assert value instanceof BigDecimal; sb.append(value.toString()); break; case DOUBLE: assert value instanceof BigDecimal; sb.append(Util.toScientificNotation((BigDecimal) value)); break; case BIGINT: assert value instanceof BigDecimal; long narrowLong = ((BigDecimal) value).longValue(); sb.append(String.valueOf(narrowLong)); sb.append('L'); break; case BINARY: assert value instanceof ByteString; sb.append("X'"); sb.append(((ByteString) value).toString(16)); sb.append("'"); break; case NULL: assert value == null; sb.append("null"); break; case SARG: assert value instanceof Sarg; Util.asStringBuilder(sb, sb2 -> printSarg(sb2, (Sarg) value, type)); break; case SYMBOL: assert value instanceof Enum; sb.append("FLAG("); sb.append(value.toString()); sb.append(")"); break; case DATE: assert value instanceof DateString; sb.append(value.toString()); break; case TIME: case TIME_WITH_LOCAL_TIME_ZONE: assert value instanceof TimeString; sb.append(value.toString()); break; case TIMESTAMP: case TIMESTAMP_WITH_LOCAL_TIME_ZONE: assert value instanceof TimestampString; sb.append(value.toString()); break; case INTERVAL_YEAR: case INTERVAL_YEAR_MONTH: case INTERVAL_MONTH: case INTERVAL_DAY: case INTERVAL_DAY_HOUR: case INTERVAL_DAY_MINUTE: case INTERVAL_DAY_SECOND: case INTERVAL_HOUR: case INTERVAL_HOUR_MINUTE: case INTERVAL_HOUR_SECOND: case INTERVAL_MINUTE: case INTERVAL_MINUTE_SECOND: 
case INTERVAL_SECOND: assert value instanceof BigDecimal; sb.append(value.toString()); break; case MULTISET: case ROW: final List<RexLiteral> list = (List) value; Util.asStringBuilder( sb, sb2 -> Util.printList( sb, list.size(), (sb3, i) -> sb3.append( list.get(i).computeDigest(includeType)))); break; case GEOMETRY: final String wkt = GeoFunctions.ST_AsWKT((Geometries.Geom) value); sb.append(wkt); break; default: assert valueMatchesType(value, typeName, true); throw Util.needToImplement(typeName); } } | /**
* Appends the specified value in the provided destination as a Java string. The value must be
* consistent with the type, as per {@link #valueMatchesType}.
*
* <p>Typical return values:
*
* <ul>
* <li>true
* <li>null
* <li>"Hello, world!"
* <li>1.25
* <li>1234ABCD
* </ul>
*
* @param value Value to be appended to the provided destination as a Java string
* @param sb Destination to which to append the specified value
* @param typeName Type name to be used for the transformation of the value to a Java string
* @param type Type to be used for the transformation of the value to a Java string
* @param includeType Whether to include the data type in the Java representation
*/ | Appends the specified value in the provided destination as a Java string. The value must be consistent with the type, as per <code>#valueMatchesType</code>. Typical return values: true null "Hello, world!" 1.25 1234ABCD | appendAsJava | {
"repo_name": "apache/flink",
"path": "flink-table/flink-table-planner/src/main/java/org/apache/calcite/rex/RexLiteral.java",
"license": "apache-2.0",
"size": 47548
} | [
"java.math.BigDecimal",
"java.util.List",
"org.apache.calcite.avatica.util.ByteString",
"org.apache.calcite.config.CalciteSystemProperty",
"org.apache.calcite.rel.type.RelDataType",
"org.apache.calcite.runtime.GeoFunctions",
"org.apache.calcite.runtime.Geometries",
"org.apache.calcite.sql.type.SqlTypeName",
"org.apache.calcite.util.DateString",
"org.apache.calcite.util.NlsString",
"org.apache.calcite.util.Sarg",
"org.apache.calcite.util.TimeString",
"org.apache.calcite.util.TimestampString",
"org.apache.calcite.util.Util"
] | import java.math.BigDecimal; import java.util.List; import org.apache.calcite.avatica.util.ByteString; import org.apache.calcite.config.CalciteSystemProperty; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.runtime.GeoFunctions; import org.apache.calcite.runtime.Geometries; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.DateString; import org.apache.calcite.util.NlsString; import org.apache.calcite.util.Sarg; import org.apache.calcite.util.TimeString; import org.apache.calcite.util.TimestampString; import org.apache.calcite.util.Util; | import java.math.*; import java.util.*; import org.apache.calcite.avatica.util.*; import org.apache.calcite.config.*; import org.apache.calcite.rel.type.*; import org.apache.calcite.runtime.*; import org.apache.calcite.sql.type.*; import org.apache.calcite.util.*; | [
"java.math",
"java.util",
"org.apache.calcite"
] | java.math; java.util; org.apache.calcite; | 1,882,132 |
public IBlockState getStateFromMeta(int meta)
{
return this.getDefaultState().withProperty(FACING, EnumFacing.getHorizontal(meta & 3)).withProperty(DAMAGE, Integer.valueOf((meta & 15) >> 2));
} | IBlockState function(int meta) { return this.getDefaultState().withProperty(FACING, EnumFacing.getHorizontal(meta & 3)).withProperty(DAMAGE, Integer.valueOf((meta & 15) >> 2)); } | /**
* Convert the given metadata into a BlockState for this Block
*/ | Convert the given metadata into a BlockState for this Block | getStateFromMeta | {
"repo_name": "MartyParty21/AwakenDreamsClient",
"path": "mcp/src/minecraft/net/minecraft/block/BlockAnvil.java",
"license": "gpl-3.0",
"size": 7488
} | [
"net.minecraft.block.state.IBlockState",
"net.minecraft.util.EnumFacing"
] | import net.minecraft.block.state.IBlockState; import net.minecraft.util.EnumFacing; | import net.minecraft.block.state.*; import net.minecraft.util.*; | [
"net.minecraft.block",
"net.minecraft.util"
] | net.minecraft.block; net.minecraft.util; | 1,632,436 |
private DataQueryParams removeFilter( String filter )
{
this.filters.remove( new BaseDimensionalObject( filter ) );
return this;
} | DataQueryParams function( String filter ) { this.filters.remove( new BaseDimensionalObject( filter ) ); return this; } | /**
* Removes the filter with the given identifier.
*/ | Removes the filter with the given identifier | removeFilter | {
"repo_name": "hispindia/dhis2-Core",
"path": "dhis-2/dhis-services/dhis-service-analytics/src/main/java/org/hisp/dhis/analytics/DataQueryParams.java",
"license": "bsd-3-clause",
"size": 105104
} | [
"org.hisp.dhis.common.BaseDimensionalObject"
] | import org.hisp.dhis.common.BaseDimensionalObject; | import org.hisp.dhis.common.*; | [
"org.hisp.dhis"
] | org.hisp.dhis; | 775,159 |
@Test
@Verifies(value = "should not partial match name on internal substrings", method = "findEncounterTypes(String)")
public void findEncounterTypes_shouldNotPartialMatchNameOnInternalSubstrings() throws Exception {
EncounterService encounterService = Context.getEncounterService();
List<EncounterType> types = encounterService.findEncounterTypes("Test Enc Type");
assertEquals(3, types.size());
types = encounterService.findEncounterTypes("Enc Type");
assertEquals(0, types.size());
}
| @Verifies(value = STR, method = STR) void function() throws Exception { EncounterService encounterService = Context.getEncounterService(); List<EncounterType> types = encounterService.findEncounterTypes(STR); assertEquals(3, types.size()); types = encounterService.findEncounterTypes(STR); assertEquals(0, types.size()); } | /**
* No types should be returned when using a substring other than the starting substring
*
* @see EncounterService#findEncounterTypes(String)
*/ | No types should be returned when using a substring other than the starting substring | findEncounterTypes_shouldNotPartialMatchNameOnInternalSubstrings | {
"repo_name": "koskedk/openmrs-core",
"path": "api/src/test/java/org/openmrs/api/EncounterServiceTest.java",
"license": "mpl-2.0",
"size": 129579
} | [
"java.util.List",
"org.junit.Assert",
"org.openmrs.EncounterType",
"org.openmrs.api.context.Context",
"org.openmrs.test.Verifies"
] | import java.util.List; import org.junit.Assert; import org.openmrs.EncounterType; import org.openmrs.api.context.Context; import org.openmrs.test.Verifies; | import java.util.*; import org.junit.*; import org.openmrs.*; import org.openmrs.api.context.*; import org.openmrs.test.*; | [
"java.util",
"org.junit",
"org.openmrs",
"org.openmrs.api",
"org.openmrs.test"
] | java.util; org.junit; org.openmrs; org.openmrs.api; org.openmrs.test; | 1,330,133 |
public static void removeGUIElement(GUIElement element)
{
synchronized(guiObjects)
{
guiObjects.remove(element);
}
}
| static void function(GUIElement element) { synchronized(guiObjects) { guiObjects.remove(element); } } | /** Remove a GUI object from the list of active GUI objects. This is done
* automatically by the layering system.
* @param element the GUI object to remove
*/ | Remove a GUI object from the list of active GUI objects. This is done automatically by the layering system | removeGUIElement | {
"repo_name": "Dimondheart/JavaGameEngine",
"path": "src/xyz/digitalcookies/objective/input/GUIMonitor.java",
"license": "apache-2.0",
"size": 2292
} | [
"xyz.digitalcookies.objective.input.gui.GUIElement"
] | import xyz.digitalcookies.objective.input.gui.GUIElement; | import xyz.digitalcookies.objective.input.gui.*; | [
"xyz.digitalcookies.objective"
] | xyz.digitalcookies.objective; | 599,737 |
public boolean tryReserve(QueryId queryId, String allocationTag, long bytes)
{
checkArgument(bytes >= 0, "bytes is negative");
synchronized (this) {
if (getFreeBytes() - bytes < 0) {
return false;
}
reservedBytes += bytes;
if (bytes != 0) {
queryMemoryReservations.merge(queryId, bytes, Long::sum);
updateTaggedMemoryAllocations(queryId, allocationTag, bytes);
}
}
onMemoryReserved();
return true;
} | boolean function(QueryId queryId, String allocationTag, long bytes) { checkArgument(bytes >= 0, STR); synchronized (this) { if (getFreeBytes() - bytes < 0) { return false; } reservedBytes += bytes; if (bytes != 0) { queryMemoryReservations.merge(queryId, bytes, Long::sum); updateTaggedMemoryAllocations(queryId, allocationTag, bytes); } } onMemoryReserved(); return true; } | /**
* Try to reserve the given number of bytes. Return value indicates whether the caller may use the requested memory.
*/ | Try to reserve the given number of bytes. Return value indicates whether the caller may use the requested memory | tryReserve | {
"repo_name": "ebyhr/presto",
"path": "core/trino-main/src/main/java/io/trino/memory/MemoryPool.java",
"license": "apache-2.0",
"size": 12812
} | [
"com.google.common.base.Preconditions",
"io.trino.spi.QueryId"
] | import com.google.common.base.Preconditions; import io.trino.spi.QueryId; | import com.google.common.base.*; import io.trino.spi.*; | [
"com.google.common",
"io.trino.spi"
] | com.google.common; io.trino.spi; | 8,965 |
protected AmbariStreamProvider createRemoteAmbariStreamProvider(Long clusterId) {
RemoteAmbariClusterEntity clusterEntity = remoteAmbariClusterDAO.findById(clusterId);
if (clusterEntity != null) {
return new RemoteAmbariStreamProvider(getBaseurl(clusterEntity.getUrl()),
clusterEntity.getUsername(), clusterEntity.getPassword(),
configuration.getViewAmbariRequestConnectTimeout(), configuration.getViewAmbariRequestReadTimeout());
}
return null;
} | AmbariStreamProvider function(Long clusterId) { RemoteAmbariClusterEntity clusterEntity = remoteAmbariClusterDAO.findById(clusterId); if (clusterEntity != null) { return new RemoteAmbariStreamProvider(getBaseurl(clusterEntity.getUrl()), clusterEntity.getUsername(), clusterEntity.getPassword(), configuration.getViewAmbariRequestConnectTimeout(), configuration.getViewAmbariRequestReadTimeout()); } return null; } | /**
* Get Remote Ambari Cluster Stream provider
*
* @param clusterId
* @return
*/ | Get Remote Ambari Cluster Stream provider | createRemoteAmbariStreamProvider | {
"repo_name": "alexryndin/ambari",
"path": "ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java",
"license": "apache-2.0",
"size": 89041
} | [
"org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity",
"org.apache.ambari.view.AmbariStreamProvider"
] | import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity; import org.apache.ambari.view.AmbariStreamProvider; | import org.apache.ambari.server.orm.entities.*; import org.apache.ambari.view.*; | [
"org.apache.ambari"
] | org.apache.ambari; | 2,507,278 |
private void updateRegion() {
Insets i = autoScroll.getAutoscrollInsets();
Dimension size = component.getSize();
if (size.width != outer.width || size.height != outer.height)
outer.reshape(0, 0, size.width, size.height);
if (inner.x != i.left || inner.y != i.top)
inner.setLocation(i.left, i.top);
int newWidth = size.width - (i.left + i.right);
int newHeight = size.height - (i.top + i.bottom);
if (newWidth != inner.width || newHeight != inner.height)
inner.setSize(newWidth, newHeight);
} | void function() { Insets i = autoScroll.getAutoscrollInsets(); Dimension size = component.getSize(); if (size.width != outer.width size.height != outer.height) outer.reshape(0, 0, size.width, size.height); if (inner.x != i.left inner.y != i.top) inner.setLocation(i.left, i.top); int newWidth = size.width - (i.left + i.right); int newHeight = size.height - (i.top + i.bottom); if (newWidth != inner.width newHeight != inner.height) inner.setSize(newWidth, newHeight); } | /**
* update the geometry of the autoscroll region
*/ | update the geometry of the autoscroll region | updateRegion | {
"repo_name": "isaacl/openjdk-jdk",
"path": "src/share/classes/java/awt/dnd/DropTarget.java",
"license": "gpl-2.0",
"size": 28043
} | [
"java.awt.Dimension",
"java.awt.Insets"
] | import java.awt.Dimension; import java.awt.Insets; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,827,100 |
protected void addPathPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_SqlFileType_path_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_SqlFileType_path_feature", "_UI_SqlFileType_type"),
DbchangelogPackage.eINSTANCE.getSqlFileType_Path(),
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
| void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), DbchangelogPackage.eINSTANCE.getSqlFileType_Path(), true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } | /**
* This adds a property descriptor for the Path feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Path feature. | addPathPropertyDescriptor | {
"repo_name": "Treehopper/EclipseAugments",
"path": "liquibase-editor/eu.hohenegger.xsd.liquibase.ui/src-gen/org/liquibase/xml/ns/dbchangelog/provider/SqlFileTypeItemProvider.java",
"license": "epl-1.0",
"size": 11226
} | [
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor",
"org.liquibase.xml.ns.dbchangelog.DbchangelogPackage"
] | import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.liquibase.xml.ns.dbchangelog.DbchangelogPackage; | import org.eclipse.emf.edit.provider.*; import org.liquibase.xml.ns.dbchangelog.*; | [
"org.eclipse.emf",
"org.liquibase.xml"
] | org.eclipse.emf; org.liquibase.xml; | 2,429,749 |
@ServiceMethod(returns = ReturnType.SINGLE)
OpenShiftManagedClusterInner getByResourceGroup(String resourceGroupName, String resourceName); | @ServiceMethod(returns = ReturnType.SINGLE) OpenShiftManagedClusterInner getByResourceGroup(String resourceGroupName, String resourceName); | /**
* Gets the details of the managed OpenShift cluster with a specified resource group and name.
*
* @param resourceGroupName The name of the resource group.
* @param resourceName The name of the OpenShift managed cluster resource.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the details of the managed OpenShift cluster with a specified resource group and name.
*/ | Gets the details of the managed OpenShift cluster with a specified resource group and name | getByResourceGroup | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-containerservice/src/main/java/com/azure/resourcemanager/containerservice/fluent/OpenShiftManagedClustersClient.java",
"license": "mit",
"size": 28206
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.containerservice.fluent.models.OpenShiftManagedClusterInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.containerservice.fluent.models.OpenShiftManagedClusterInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.containerservice.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 285,895 |
@SuppressWarnings("unchecked")
public static <T> Degrees51Mapping<T> retrieveMappingByDxaKey(String dxaKey) {
try {
return (Degrees51Mapping<T>) getContext().getBean(dxaKey, Degrees51Mapping.class);
} catch (NoSuchBeanDefinitionException e) {
return null;
}
} | @SuppressWarnings(STR) static <T> Degrees51Mapping<T> function(String dxaKey) { try { return (Degrees51Mapping<T>) getContext().getBean(dxaKey, Degrees51Mapping.class); } catch (NoSuchBeanDefinitionException e) { return null; } } | /**
* Tries to resolve a registered mapping from Spring context by its name.
* Typically name of this mapping is a DXA claim name.
*
* @param dxaKey name of the mapping, typically DXA claim name
* @param <T> an expected type of final value for this mapping
* @return a mapping if found or null
*/ | Tries to resolve a registered mapping from Spring context by its name. Typically name of this mapping is a DXA claim name | retrieveMappingByDxaKey | {
"repo_name": "sdl/dxa-modules",
"path": "webapp-java/dxa-module-51degrees/src/main/java/com/sdl/dxa/modules/degrees51/api/mapping/Degrees51Mapping.java",
"license": "apache-2.0",
"size": 3705
} | [
"com.sdl.webapp.common.util.ApplicationContextHolder",
"org.springframework.beans.factory.NoSuchBeanDefinitionException"
] | import com.sdl.webapp.common.util.ApplicationContextHolder; import org.springframework.beans.factory.NoSuchBeanDefinitionException; | import com.sdl.webapp.common.util.*; import org.springframework.beans.factory.*; | [
"com.sdl.webapp",
"org.springframework.beans"
] | com.sdl.webapp; org.springframework.beans; | 1,948,175 |
public Set<String> getRoles(); | Set<String> function(); | /** Gets the roles played by the SOAP binding instance.
*
* @return Set<String> The set of roles played by the binding instance.
**/ | Gets the roles played by the SOAP binding instance | getRoles | {
"repo_name": "samskivert/ikvm-openjdk",
"path": "build/linux-amd64/impsrc/javax/xml/ws/soap/SOAPBinding.java",
"license": "gpl-2.0",
"size": 3806
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 1,909,075 |
@Test
public void readInterval() {
DataSource source = new SimpleFileDataSource(
"src/test/resources/mini-archive-export.csv");
DataRequestThread thread = null;
try {
start = dateFormat.parse("2014-03-14 16:00").toInstant();
end = dateFormat.parse("2014-03-14 17:00").toInstant();
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(start, end));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(234, dataTimes.get(thread).size());
// inverse interval => same result
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(end, start));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(234, dataTimes.get(thread).size());
// empty interval => no data
start = dateFormat.parse("2014-03-14 17:00").toInstant();
end = dateFormat.parse("2014-03-14 18:00").toInstant();
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(start, end));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(0, dataTimes.get(thread).size());
// only start => same result
start = dateFormat.parse("2014-03-14 16:00").toInstant();
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(start, null));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(234, dataTimes.get(thread).size());
// only end => no data
end = dateFormat.parse("2014-03-14 17:00").toInstant();
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(null, end));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(0, dataTimes.get(thread).size());
// infinite => no data
end = dateFormat.parse("2014-03-14 17:00").toInstant();
thread = new DataRequestThread("TEST-BTY0:AI1", source,
TimeInterval.between(null, null));
thread.addListener(new TUListener());
startAndCount(thread);
Assert.assertEquals(0, dataTimes.get(thread).size());
// test wrong parameters
try {
thread = new DataRequestThread(null, source,
TimeInterval.between(null, null));
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(true);
}
try {
thread = new DataRequestThread("TEST-BTY0:AI1", null,
TimeInterval.between(null, null));
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(true);
}
try {
thread = new DataRequestThread("TEST-BTY0:AI1", source, null);
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(true);
}
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
} | void function() { DataSource source = new SimpleFileDataSource( STR); DataRequestThread thread = null; try { start = dateFormat.parse(STR).toInstant(); end = dateFormat.parse(STR).toInstant(); thread = new DataRequestThread(STR, source, TimeInterval.between(start, end)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(234, dataTimes.get(thread).size()); thread = new DataRequestThread(STR, source, TimeInterval.between(end, start)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(234, dataTimes.get(thread).size()); start = dateFormat.parse(STR).toInstant(); end = dateFormat.parse(STR).toInstant(); thread = new DataRequestThread(STR, source, TimeInterval.between(start, end)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(0, dataTimes.get(thread).size()); start = dateFormat.parse(STR).toInstant(); thread = new DataRequestThread(STR, source, TimeInterval.between(start, null)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(234, dataTimes.get(thread).size()); end = dateFormat.parse(STR).toInstant(); thread = new DataRequestThread(STR, source, TimeInterval.between(null, end)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(0, dataTimes.get(thread).size()); end = dateFormat.parse(STR).toInstant(); thread = new DataRequestThread(STR, source, TimeInterval.between(null, null)); thread.addListener(new TUListener()); startAndCount(thread); Assert.assertEquals(0, dataTimes.get(thread).size()); try { thread = new DataRequestThread(null, source, TimeInterval.between(null, null)); Assert.fail(); } catch (Exception e) { Assert.assertTrue(true); } try { thread = new DataRequestThread(STR, null, TimeInterval.between(null, null)); Assert.fail(); } catch (Exception e) { Assert.assertTrue(true); } try { thread = new DataRequestThread(STR, source, null); Assert.fail(); } catch (Exception e) { Assert.assertTrue(true); } 
} catch (Exception e) { e.printStackTrace(); Assert.fail(e.getMessage()); } } | /**
* Test that {@link DataRequestThread} reads all samples from a small dedicated
* file. Test that samples are grouped in chunks. Test that result are
* returned in chronological order. Test that wrong intervals values do not
* raise an exception.
*/ | Test that <code>DataRequestThread</code> reads all samples from a small dedicated file. Test that samples are grouped in chunks. Test that result are returned in chronological order. Test that wrong intervals values do not raise an exception | readInterval | {
"repo_name": "ControlSystemStudio/diirt",
"path": "pvmanager/datasource-timecache/src/test/java/org/diirt/datasource/timecache/impl/SimpleFileDataSourceUnitTests.java",
"license": "mit",
"size": 10917
} | [
"org.diirt.datasource.timecache.DataRequestThread",
"org.diirt.datasource.timecache.impl.SimpleFileDataSource",
"org.diirt.datasource.timecache.source.DataSource",
"org.diirt.util.time.TimeInterval",
"org.junit.Assert"
] | import org.diirt.datasource.timecache.DataRequestThread; import org.diirt.datasource.timecache.impl.SimpleFileDataSource; import org.diirt.datasource.timecache.source.DataSource; import org.diirt.util.time.TimeInterval; import org.junit.Assert; | import org.diirt.datasource.timecache.*; import org.diirt.datasource.timecache.impl.*; import org.diirt.datasource.timecache.source.*; import org.diirt.util.time.*; import org.junit.*; | [
"org.diirt.datasource",
"org.diirt.util",
"org.junit"
] | org.diirt.datasource; org.diirt.util; org.junit; | 809,481 |
public static void getUsedVariables( String aString, String open, String close, List<String> list,
boolean includeSystemVariables ) {
if ( aString == null ) {
return;
}
int p = 0;
while ( p < aString.length() ) {
// OK, we found something... : start of Unix variable
if ( aString.substring( p ).startsWith( open ) ) {
// See if it's closed...
int from = p + open.length();
int to = aString.indexOf( close, from + 1 );
if ( to >= 0 ) {
String variable = aString.substring( from, to );
if ( Const.indexOfString( variable, list ) < 0 ) {
// Either we include the system variables (all)
// Or the variable is not a system variable
// Or it's a system variable but the value has not been set (and we offer the user the option to set it)
//
if ( includeSystemVariables || !isSystemVariable( variable ) || System.getProperty( variable ) == null ) {
list.add( variable );
}
}
// OK, continue
p = to + close.length();
}
}
p++;
}
} | static void function( String aString, String open, String close, List<String> list, boolean includeSystemVariables ) { if ( aString == null ) { return; } int p = 0; while ( p < aString.length() ) { if ( aString.substring( p ).startsWith( open ) ) { int from = p + open.length(); int to = aString.indexOf( close, from + 1 ); if ( to >= 0 ) { String variable = aString.substring( from, to ); if ( Const.indexOfString( variable, list ) < 0 ) { list.add( variable ); } } p = to + close.length(); } } p++; } } | /**
* Search the string and report back on the variables used
*
* @param aString
* The string to search
* @param open
* the open or "start of variable" characters ${ or %%
* @param close
* the close or "end of variable" characters } or %%
* @param list
* the list of variables to add to
* @param includeSystemVariables
* also check for system variables.
*/ | Search the string and report back on the variables used | getUsedVariables | {
"repo_name": "roboguy/pentaho-kettle",
"path": "core/src/main/java/org/pentaho/di/core/util/StringUtil.java",
"license": "apache-2.0",
"size": 22710
} | [
"java.util.List",
"org.pentaho.di.core.Const"
] | import java.util.List; import org.pentaho.di.core.Const; | import java.util.*; import org.pentaho.di.core.*; | [
"java.util",
"org.pentaho.di"
] | java.util; org.pentaho.di; | 1,923,847 |
@Override
public void notifyChanged(Notification notification) {
updateChildren(notification);
switch (notification.getFeatureID(SystemErrorReaction.class)) {
case WTSpecPackage.SYSTEM_ERROR_REACTION__SYS_ID:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
}
super.notifyChanged(notification);
}
| void function(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(SystemErrorReaction.class)) { case WTSpecPackage.SYSTEM_ERROR_REACTION__SYS_ID: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true)); return; } super.notifyChanged(notification); } | /**
* This handles model notifications by calling {@link #updateChildren} to update any cached
* children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>. | notifyChanged | {
"repo_name": "FTSRG/mondo-collab-framework",
"path": "archive/workspaceTracker/VA/ikerlanEMF.edit/src/eu/mondo/collaboration/operationtracemodel/example/WTSpec/provider/SystemErrorReactionItemProvider.java",
"license": "epl-1.0",
"size": 4636
} | [
"eu.mondo.collaboration.operationtracemodel.example.WTSpec",
"org.eclipse.emf.common.notify.Notification",
"org.eclipse.emf.edit.provider.ViewerNotification"
] | import eu.mondo.collaboration.operationtracemodel.example.WTSpec; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.edit.provider.ViewerNotification; | import eu.mondo.collaboration.operationtracemodel.example.*; import org.eclipse.emf.common.notify.*; import org.eclipse.emf.edit.provider.*; | [
"eu.mondo.collaboration",
"org.eclipse.emf"
] | eu.mondo.collaboration; org.eclipse.emf; | 1,635,657 |
VirtualLink getLink(NetworkId networkId, ConnectPoint src, ConnectPoint dst); | VirtualLink getLink(NetworkId networkId, ConnectPoint src, ConnectPoint dst); | /**
* Returns the virtual link matching the network identifier, source connect point,
* and destination connect point.
*
* @param networkId network identifier
* @param src source connect point
* @param dst destination connect point
* @return virtual link
*/ | Returns the virtual link matching the network identifier, source connect point, and destination connect point | getLink | {
"repo_name": "LorenzReinhart/ONOSnew",
"path": "incubator/api/src/main/java/org/onosproject/incubator/net/virtual/VirtualNetworkStore.java",
"license": "apache-2.0",
"size": 8327
} | [
"org.onosproject.net.ConnectPoint"
] | import org.onosproject.net.ConnectPoint; | import org.onosproject.net.*; | [
"org.onosproject.net"
] | org.onosproject.net; | 2,076,945 |
public void setOnItemClickListener(OnItemClickListener listener) {
mRefreshableView.setOnItemClickListener(listener);
} | void function(OnItemClickListener listener) { mRefreshableView.setOnItemClickListener(listener); } | /**
* Pass-through method for {@link PullToRefreshBase#getRefreshableView()
* getRefreshableView()}.
* {@link AdapterView#setOnItemClickListener(OnItemClickListener)
* setOnItemClickListener(listener)}. This is just for convenience!
*
* @param listener - OnItemClickListener to use
*/ | Pass-through method for <code>PullToRefreshBase#getRefreshableView() getRefreshableView()</code>. <code>AdapterView#setOnItemClickListener(OnItemClickListener) setOnItemClickListener(listener)</code>. This is just for convenience | setOnItemClickListener | {
"repo_name": "bankeshkumar90/TabSwipeListener",
"path": "libs/push_lib/src/com/triazine/pulltorefresh/library/PullToRefreshAdapterViewBase.java",
"license": "mit",
"size": 14281
} | [
"android.widget.AdapterView"
] | import android.widget.AdapterView; | import android.widget.*; | [
"android.widget"
] | android.widget; | 87,223 |
public void setLocales(final Collection<? extends Locale> newValues) {
locales = copyCollection(newValues, locales, Locale.class);
} | void function(final Collection<? extends Locale> newValues) { locales = copyCollection(newValues, locales, Locale.class); } | /**
* Set information about an alternatively used localized character string for a linguistic
* extension.
*
* @since 2.4
*/ | Set information about an alternatively used localized character string for a linguistic extension | setLocales | {
"repo_name": "geotools/geotools",
"path": "modules/library/metadata/src/main/java/org/geotools/metadata/iso/MetaDataImpl.java",
"license": "lgpl-2.1",
"size": 18059
} | [
"java.util.Collection",
"java.util.Locale"
] | import java.util.Collection; import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 2,359,016 |
protected EventBus createEventBusC2V() {
return new EventBusImpl();
} | EventBus function() { return new EventBusImpl(); } | /**
* Create a new instance of EventBus for events from controllers to views. This event bus
* will be injected into fields annotated by {@link EventBusC2V}.
*
* @return The event bus
*/ | Create a new instance of EventBus for events from controllers to views. This event bus will be injected into fields annotated by <code>EventBusC2V</code> | createEventBusC2V | {
"repo_name": "tananrules/AndroidMvc",
"path": "library/android-mvc-controller/src/main/java/com/shipdream/lib/android/mvc/MvcGraph.java",
"license": "apache-2.0",
"size": 15931
} | [
"com.shipdream.lib.android.mvc.event.bus.EventBus",
"com.shipdream.lib.android.mvc.event.bus.internal.EventBusImpl"
] | import com.shipdream.lib.android.mvc.event.bus.EventBus; import com.shipdream.lib.android.mvc.event.bus.internal.EventBusImpl; | import com.shipdream.lib.android.mvc.event.bus.*; import com.shipdream.lib.android.mvc.event.bus.internal.*; | [
"com.shipdream.lib"
] | com.shipdream.lib; | 671,684 |
protected String lookupDnByUid(String uid, String baseDN) throws SearchException {
Filter filter = Filter.createEqualityFilter(LdapOperationsServiceImpl.UID, uid);
SearchResult searchResult = search(baseDN, filter, 1, 1);
if ((searchResult != null) && searchResult.getEntryCount() > 0) {
return searchResult.getSearchEntries().get(0).getDN();
}
return null;
} | String function(String uid, String baseDN) throws SearchException { Filter filter = Filter.createEqualityFilter(LdapOperationsServiceImpl.UID, uid); SearchResult searchResult = search(baseDN, filter, 1, 1); if ((searchResult != null) && searchResult.getEntryCount() > 0) { return searchResult.getSearchEntries().get(0).getDN(); } return null; } | /**
* Looks the uid in ldap and return the DN
*/ | Looks the uid in ldap and return the DN | lookupDnByUid | {
"repo_name": "madumlao/oxCore",
"path": "oxLdap/src/main/java/org/gluu/persist/ldap/operation/impl/LdapOperationsServiceImpl.java",
"license": "mit",
"size": 42487
} | [
"com.unboundid.ldap.sdk.Filter",
"com.unboundid.ldap.sdk.SearchResult",
"org.gluu.persist.exception.operation.SearchException"
] | import com.unboundid.ldap.sdk.Filter; import com.unboundid.ldap.sdk.SearchResult; import org.gluu.persist.exception.operation.SearchException; | import com.unboundid.ldap.sdk.*; import org.gluu.persist.exception.operation.*; | [
"com.unboundid.ldap",
"org.gluu.persist"
] | com.unboundid.ldap; org.gluu.persist; | 1,134,479 |
public Map<String, double[]> tagIndividuals(Vector<Individual> individuals,
int tagLevel, int tagDepth, boolean doSetTags, int tagMethod) {
int numThreads = config.getNumThreads();
int chunkSize = individuals.size() / numThreads;
int start = 0, end = 0;
Thread[] threads = new Thread[numThreads];
Map<String, double[]> tagStats = null; // holds the tags (if not setting
// them)
// Are we setting the tags or collecting them?
if (!doSetTags) {
if (tagMethod == 0) {
tagStats = new HashMap<String, double[]>();
}
}
// Fire off all the tagging threads
for (int i = 0; i < numThreads; i++) {
start = i * chunkSize;
// Set the end index (account for uneven popSize/numThreads)
end = (i == numThreads - 1) ? individuals.size()
: (start + chunkSize); // subList end is exclusive so no -1
TagThread thread = new TagThread(individuals.subList(start, end),
tagStats, tagLevel, tagDepth, doSetTags, tagMethod);
threads[i] = new Thread(thread);
threads[i].start();
}
// Join them all together
for (int i = 0; i < numThreads; i++) {
try {
threads[i].join();
} catch (InterruptedException e) {
logger.error(e);
System.exit(1);
}
}
// We need to set the density and average fitness if collecting stats
if (!doSetTags) {
if (tagMethod == 0) {
for (String tag : tagStats.keySet()) {
double[] stats = tagStats.get(tag);
// First set the average fitness
stats[1] /= stats[0];
// Now transform the counts into a density
stats[0] /= individuals.size();
}
}
}
return tagStats;
}
| Map<String, double[]> function(Vector<Individual> individuals, int tagLevel, int tagDepth, boolean doSetTags, int tagMethod) { int numThreads = config.getNumThreads(); int chunkSize = individuals.size() / numThreads; int start = 0, end = 0; Thread[] threads = new Thread[numThreads]; Map<String, double[]> tagStats = null; if (!doSetTags) { if (tagMethod == 0) { tagStats = new HashMap<String, double[]>(); } } for (int i = 0; i < numThreads; i++) { start = i * chunkSize; end = (i == numThreads - 1) ? individuals.size() : (start + chunkSize); TagThread thread = new TagThread(individuals.subList(start, end), tagStats, tagLevel, tagDepth, doSetTags, tagMethod); threads[i] = new Thread(thread); threads[i].start(); } for (int i = 0; i < numThreads; i++) { try { threads[i].join(); } catch (InterruptedException e) { logger.error(e); System.exit(1); } } if (!doSetTags) { if (tagMethod == 0) { for (String tag : tagStats.keySet()) { double[] stats = tagStats.get(tag); stats[1] /= stats[0]; stats[0] /= individuals.size(); } } } return tagStats; } | /**
* Convenience method to tag the given collection of individuals.
*
* @param individuals
* the individuals to tag.
*
* @param tagLevel
* the level in the tree to start building the tag
*
* @param tagDepth
* how far down from the tagLevel to go in order to make the tag
*
* @return the set of tags generated, mapped to their densities and average
* fitness, or null if we're setting the tags on the individuals
* instead of collecting them
*
*/ | Convenience method to tag the given collection of individuals | tagIndividuals | {
"repo_name": "burks-pub/gecco2015",
"path": "src/main/java/ec/research/gp/pareto/DiversityUtils.java",
"license": "bsd-2-clause",
"size": 18938
} | [
"ec.research.gp.simple.representation.Individual",
"java.util.HashMap",
"java.util.Map",
"java.util.Vector"
] | import ec.research.gp.simple.representation.Individual; import java.util.HashMap; import java.util.Map; import java.util.Vector; | import ec.research.gp.simple.representation.*; import java.util.*; | [
"ec.research.gp",
"java.util"
] | ec.research.gp; java.util; | 2,601,279 |
public static void obtainTokenForJob(final Connection conn,
User user, Job job)
throws IOException, InterruptedException {
try {
Token<AuthenticationTokenIdentifier> token = obtainToken(conn, user);
if (token == null) {
throw new IOException("No token returned for user " + user.getName());
}
Text clusterId = getClusterId(token);
if (LOG.isDebugEnabled()) {
LOG.debug("Obtained token " + token.getKind().toString() + " for user " +
user.getName() + " on cluster " + clusterId.toString());
}
job.getCredentials().addToken(clusterId, token);
} catch (IOException ioe) {
throw ioe;
} catch (InterruptedException ie) {
throw ie;
} catch (RuntimeException re) {
throw re;
} catch (Exception e) {
throw new UndeclaredThrowableException(e,
"Unexpected exception obtaining token for user " + user.getName());
}
} | static void function(final Connection conn, User user, Job job) throws IOException, InterruptedException { try { Token<AuthenticationTokenIdentifier> token = obtainToken(conn, user); if (token == null) { throw new IOException(STR + user.getName()); } Text clusterId = getClusterId(token); if (LOG.isDebugEnabled()) { LOG.debug(STR + token.getKind().toString() + STR + user.getName() + STR + clusterId.toString()); } job.getCredentials().addToken(clusterId, token); } catch (IOException ioe) { throw ioe; } catch (InterruptedException ie) { throw ie; } catch (RuntimeException re) { throw re; } catch (Exception e) { throw new UndeclaredThrowableException(e, STR + user.getName()); } } | /**
* Obtain an authentication token on behalf of the given user and add it to
* the credentials for the given map reduce job.
* @param conn The HBase cluster connection
* @param user The user for whom to obtain the token
* @param job The job instance in which the token should be stored
* @throws IOException If making a remote call to the authentication service fails
* @throws InterruptedException If executing as the given user is interrupted
*/ | Obtain an authentication token on behalf of the given user and add it to the credentials for the given map reduce job | obtainTokenForJob | {
"repo_name": "Eshcar/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java",
"license": "apache-2.0",
"size": 13148
} | [
"java.io.IOException",
"java.lang.reflect.UndeclaredThrowableException",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.security.User",
"org.apache.hadoop.io.Text",
"org.apache.hadoop.mapreduce.Job",
"org.apache.hadoop.security.token.Token"
] | import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.security.token.Token; | import java.io.*; import java.lang.reflect.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.security.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.security.token.*; | [
"java.io",
"java.lang",
"org.apache.hadoop"
] | java.io; java.lang; org.apache.hadoop; | 841,890 |
default void addDocumentListener(@NotNull DocumentListener listener) {
} | default void addDocumentListener(@NotNull DocumentListener listener) { } | /**
* Adds a listener for receiving notifications about changes in the document content.
*
* @param listener the listener instance.
*/ | Adds a listener for receiving notifications about changes in the document content | addDocumentListener | {
"repo_name": "ThiagoGarciaAlves/intellij-community",
"path": "platform/core-api/src/com/intellij/openapi/editor/Document.java",
"license": "apache-2.0",
"size": 13276
} | [
"com.intellij.openapi.editor.event.DocumentListener",
"org.jetbrains.annotations.NotNull"
] | import com.intellij.openapi.editor.event.DocumentListener; import org.jetbrains.annotations.NotNull; | import com.intellij.openapi.editor.event.*; import org.jetbrains.annotations.*; | [
"com.intellij.openapi",
"org.jetbrains.annotations"
] | com.intellij.openapi; org.jetbrains.annotations; | 1,758,519 |
public static Calendar getCalendarInstance(final Date date) {
Calendar instance;
if (date instanceof DateTime) {
final DateTime dateTime = (DateTime) date;
if (dateTime.getTimeZone() != null) {
instance = Calendar.getInstance(dateTime.getTimeZone());
}
else if (dateTime.isUtc()) {
instance = Calendar.getInstance(TimeZones.getUtcTimeZone());
}
else {
// a date-time without a timezone but not UTC is floating
instance = Calendar.getInstance();
}
}
else {
instance = Calendar.getInstance(TimeZones.getDateTimeZone());
}
return instance;
} | static Calendar function(final Date date) { Calendar instance; if (date instanceof DateTime) { final DateTime dateTime = (DateTime) date; if (dateTime.getTimeZone() != null) { instance = Calendar.getInstance(dateTime.getTimeZone()); } else if (dateTime.isUtc()) { instance = Calendar.getInstance(TimeZones.getUtcTimeZone()); } else { instance = Calendar.getInstance(); } } else { instance = Calendar.getInstance(TimeZones.getDateTimeZone()); } return instance; } | /**
* Returns an instance of <code>java.util.Calendar</code> that is suitably
* initialised for working with the specified date.
* @param date a date instance
* @return a <code>java.util.Calendar</code>
*/ | Returns an instance of <code>java.util.Calendar</code> that is suitably initialised for working with the specified date | getCalendarInstance | {
"repo_name": "ical4j/ical4j",
"path": "src/main/java/net/fortuna/ical4j/util/Dates.java",
"license": "bsd-3-clause",
"size": 12348
} | [
"java.util.Calendar",
"net.fortuna.ical4j.model.Date",
"net.fortuna.ical4j.model.DateTime"
] | import java.util.Calendar; import net.fortuna.ical4j.model.Date; import net.fortuna.ical4j.model.DateTime; | import java.util.*; import net.fortuna.ical4j.model.*; | [
"java.util",
"net.fortuna.ical4j"
] | java.util; net.fortuna.ical4j; | 2,877,135 |
@DELETE
@Produces(MediaType.TEXT_PLAIN)
public String unsubscribe(final UserDto toDeleteUser) {
return "Erf, you want to quit !";
} | @Produces(MediaType.TEXT_PLAIN) String function(final UserDto toDeleteUser) { return STR; } | /**
* Method handling HTTP GET requests. The returned object will be sent to
* the client as "text/plain" media type.
*
* @return String that will be returned as a text/plain response.
*/ | Method handling HTTP GET requests. The returned object will be sent to the client as "text/plain" media type | unsubscribe | {
"repo_name": "herveDarritchon/soebackend",
"path": "src/main/java/fr/hervedarritchon/soe/soebackend/api/UserResource.java",
"license": "gpl-2.0",
"size": 2788
} | [
"fr.hervedarritchon.soe.soebackend.api.model.UserDto",
"javax.ws.rs.Produces",
"javax.ws.rs.core.MediaType"
] | import fr.hervedarritchon.soe.soebackend.api.model.UserDto; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; | import fr.hervedarritchon.soe.soebackend.api.model.*; import javax.ws.rs.*; import javax.ws.rs.core.*; | [
"fr.hervedarritchon.soe",
"javax.ws"
] | fr.hervedarritchon.soe; javax.ws; | 1,381,195 |
@Test
public void testFinalizeWithIntermediateState() throws Exception {
List<File> files = generateTemporaryFilesForFinalize(3);
SimpleSink.SimpleWriteOperation writeOp = buildWriteOperationForFinalize(false);
runFinalize(writeOp, files, false);
// create a temporary file
tmpFolder.newFile(
baseTemporaryFilename + FileBasedWriteOperation.TEMPORARY_FILENAME_SEPARATOR + "1");
runFinalize(writeOp, files, false);
} | void function() throws Exception { List<File> files = generateTemporaryFilesForFinalize(3); SimpleSink.SimpleWriteOperation writeOp = buildWriteOperationForFinalize(false); runFinalize(writeOp, files, false); tmpFolder.newFile( baseTemporaryFilename + FileBasedWriteOperation.TEMPORARY_FILENAME_SEPARATOR + "1"); runFinalize(writeOp, files, false); } | /**
* Finalize can be called when some temporary files do not exist and output files exist.
*/ | Finalize can be called when some temporary files do not exist and output files exist | testFinalizeWithIntermediateState | {
"repo_name": "shakamunyi/beam",
"path": "sdks/java/core/src/test/java/com/google/cloud/dataflow/sdk/io/FileBasedSinkTest.java",
"license": "apache-2.0",
"size": 18193
} | [
"com.google.cloud.dataflow.sdk.io.FileBasedSink",
"java.io.File",
"java.util.List"
] | import com.google.cloud.dataflow.sdk.io.FileBasedSink; import java.io.File; import java.util.List; | import com.google.cloud.dataflow.sdk.io.*; import java.io.*; import java.util.*; | [
"com.google.cloud",
"java.io",
"java.util"
] | com.google.cloud; java.io; java.util; | 2,651,519 |
public void createFeed(StorageFeedWrapper wrapper) throws StorageException {
this.lock.lock();
try {
this.forceWriteDocuments.add(wrapper.getLuceneDocument());
storageModified();
} finally {
this.lock.unlock();
}
} | void function(StorageFeedWrapper wrapper) throws StorageException { this.lock.lock(); try { this.forceWriteDocuments.add(wrapper.getLuceneDocument()); storageModified(); } finally { this.lock.unlock(); } } | /**
* Adds a new Feed to the storage. Feed action will be not buffered. Call to
* this method forces the index to be written.
*
* @param wrapper -
* the wrapper containing the feed;
* @throws StorageException -
* if the feed can not be written
*/ | Adds a new Feed to the storage. Feed action will be not buffered. Call to this method forces the index to be written | createFeed | {
"repo_name": "adichad/lucene-new",
"path": "contrib/gdata-server/src/core/src/java/org/apache/lucene/gdata/storage/lucenestorage/StorageModifier.java",
"license": "apache-2.0",
"size": 18713
} | [
"org.apache.lucene.gdata.storage.StorageException"
] | import org.apache.lucene.gdata.storage.StorageException; | import org.apache.lucene.gdata.storage.*; | [
"org.apache.lucene"
] | org.apache.lucene; | 453,034 |
public void selectByValue(final String... values) {
executeAction(new PageObjectCallback() { | void function(final String... values) { executeAction(new PageObjectCallback() { | /**
* Selects all options by their value. Giving no value as parameters will
* simply deselect all options. If this {@link Select select} is not a
* multi-select field and more than one value is given, every value will be
* selected in sequence.
*
* @param values the values of the options which should be selected
* @throws NoSuchElementException in case there is no option with the given
* value(s)
* @since 0.9.0
*/ | Selects all options by their value. Giving no value as parameters will simply deselect all options. If this <code>Select select</code> is not a multi-select field and more than one value is given, every value will be selected in sequence | selectByValue | {
"repo_name": "dbe-it/webtester-core",
"path": "webtester-core/src/main/java/info/novatec/testit/webtester/pageobjects/Select.java",
"license": "apache-2.0",
"size": 13390
} | [
"info.novatec.testit.webtester.api.callbacks.PageObjectCallback"
] | import info.novatec.testit.webtester.api.callbacks.PageObjectCallback; | import info.novatec.testit.webtester.api.callbacks.*; | [
"info.novatec.testit"
] | info.novatec.testit; | 4,076 |
Set<String> getCommands(); | Set<String> getCommands(); | /**
* Returns the recognizable commands.
* @return the recognizable commands
*/ | Returns the recognizable commands | getCommands | {
"repo_name": "ashigeru/asakusafw-compiler",
"path": "bridge-project/runtime/src/main/java/com/asakusafw/bridge/launch/LaunchOption.java",
"license": "apache-2.0",
"size": 1503
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 996,782 |
URLClassLoader sysloader = (URLClassLoader) ClassLoader.getSystemClassLoader();
Class sysclass = URLClassLoader.class;
java.lang.reflect.Method method = sysclass.getDeclaredMethod("addURL", new Class[] {URL.class});
method.setAccessible(true);
method.invoke(sysloader, new Object[]{url});
}
| URLClassLoader sysloader = (URLClassLoader) ClassLoader.getSystemClassLoader(); Class sysclass = URLClassLoader.class; java.lang.reflect.Method method = sysclass.getDeclaredMethod(STR, new Class[] {URL.class}); method.setAccessible(true); method.invoke(sysloader, new Object[]{url}); } | /**
* Adds the specified URL (for a jar or a directory) to the System
* ClassLoader. This code was written by antony_miguel and posted on
* http://forum.java.sun.com/thread.jsp?forum=32&thread=300557&message=1191210
* I assume it has been placed in the public domain.
*
* @param url the URL of the resource (directory or jar) to add to the
* System classpath
* @throws Exception if anything goes wrong. The most likely culprit, should
* this ever arise, would be that your VM is not using a URLClassLoader as the
* System ClassLoader. This would result in a ClassClastException that you
* probably can't do much about.
*/ | Adds the specified URL (for a jar or a directory) to the System ClassLoader. This code was written by antony_miguel and posted on HREF I assume it has been placed in the public domain | addToSystemClassLoader | {
"repo_name": "benhyland/nailgun",
"path": "nailgun-server/src/main/java/com/martiansoftware/nailgun/builtins/NGClasspath.java",
"license": "apache-2.0",
"size": 2851
} | [
"java.net.URLClassLoader"
] | import java.net.URLClassLoader; | import java.net.*; | [
"java.net"
] | java.net; | 1,838,896 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.