Column schema for the sampled rows below:

method                string (lengths 13 to 441k)
clean_method          string (lengths 7 to 313k)
doc                   string (lengths 17 to 17.3k)
comment               string (lengths 3 to 1.42k)
method_name           string (lengths 1 to 273)
extra                 dict
imports               sequence
imports_info          string (lengths 19 to 34.8k)
cluster_imports_info  string (lengths 15 to 3.66k)
libraries             sequence
libraries_info        string (lengths 6 to 661)
id                    int64 (0 to 2.92M)
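To make the column types above concrete, the sketch below models a single row as a plain Java record (Java 16+ syntax). Only the column names and dtypes come from the schema; the class name MethodRecord and the per-field comments are my own reading of the sample rows that follow, not part of the dataset.

    import java.util.List;
    import java.util.Map;

    // Illustrative sketch only: one row of the sample, typed after the schema above.
    public record MethodRecord(
            String method,              // raw Java method source (13 to 441k chars)
            String cleanMethod,         // method with comments stripped, its name replaced by "function",
                                        // access modifiers dropped, and long string literals masked as STR
            String doc,                 // the original Javadoc block
            String comment,             // roughly the Javadoc description, with @param/@return tags dropped
            String methodName,          // the original method name
            Map<String, Object> extra,  // repo_name, path, license, size
            List<String> imports,       // fully qualified imports referenced by the method
            String importsInfo,         // the same imports rendered as "import ...;" statements
            String clusterImportsInfo,  // the imports collapsed to one wildcard import per package
            List<String> libraries,     // library/package prefixes derived from the imports
            String librariesInfo,       // the libraries rendered as a single "; "-separated string
            long id) {                  // numeric row id (0 to 2.92M)
    }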
@GlueMethod(version = 1) public static InputStream resource(@GlueParam(name = "name") String name, @GlueParam(name = "script") String script) throws IOException, ParseException { if (script != null) { return ScriptUtils.getRoot(ScriptRuntime.getRuntime().getScript().getRepository()).getScript(script).getResource(name); } return getInputStream(name); }
@GlueMethod(version = 1) static InputStream function(@GlueParam(name = "name") String name, @GlueParam(name = STR) String script) throws IOException, ParseException { if (script != null) { return ScriptUtils.getRoot(ScriptRuntime.getRuntime().getScript().getRepository()).getScript(script).getResource(name); } return getInputStream(name); }
/** * Loads a resource as inputstream */
Loads a resource as inputstream
resource
{ "repo_name": "nablex/glue", "path": "src/main/java/be/nabu/glue/core/impl/methods/ScriptMethods.java", "license": "lgpl-2.1", "size": 28364 }
[ "be.nabu.glue.annotations.GlueMethod", "be.nabu.glue.annotations.GlueParam", "be.nabu.glue.utils.ScriptRuntime", "be.nabu.glue.utils.ScriptUtils", "java.io.IOException", "java.io.InputStream", "java.text.ParseException" ]
import be.nabu.glue.annotations.GlueMethod; import be.nabu.glue.annotations.GlueParam; import be.nabu.glue.utils.ScriptRuntime; import be.nabu.glue.utils.ScriptUtils; import java.io.IOException; import java.io.InputStream; import java.text.ParseException;
import be.nabu.glue.annotations.*; import be.nabu.glue.utils.*; import java.io.*; import java.text.*;
[ "be.nabu.glue", "java.io", "java.text" ]
be.nabu.glue; java.io; java.text;
668,483
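In the row above, @GlueParam(name = "script") in method becomes @GlueParam(name = STR) in clean_method while the shorter literal "name" is kept, so clean_method appears to mask only longer string literals. Below is a minimal sketch of that masking, assuming a single regex pass with a fixed length cutoff; the class name, helper name, and the cutoff of 4 characters are guesses for illustration, not the dataset's actual preprocessing.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Illustrative sketch only: masks long string literals the way clean_method appears to.
    public class LiteralMasker {
        // A double-quoted Java string literal, allowing escaped characters inside it.
        private static final Pattern STRING_LITERAL = Pattern.compile("\"(?:[^\"\\\\]|\\\\.)*\"");

        // Replaces every string literal whose content is longer than maxKeep characters with STR.
        static String maskLiterals(String source, int maxKeep) {
            Matcher m = STRING_LITERAL.matcher(source);
            StringBuilder out = new StringBuilder();
            while (m.find()) {
                String literal = m.group();                        // includes the surrounding quotes
                String replacement = literal.length() - 2 > maxKeep ? "STR" : literal;
                m.appendReplacement(out, Matcher.quoteReplacement(replacement));
            }
            m.appendTail(out);
            return out.toString();
        }

        public static void main(String[] args) {
            String snippet = "@GlueParam(name = \"name\") String name, @GlueParam(name = \"script\") String script";
            // Prints: @GlueParam(name = "name") String name, @GlueParam(name = STR) String script
            System.out.println(maskLiterals(snippet, 4));
        }
    }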
@ServiceMethod(returns = ReturnType.SINGLE) public ElasticPoolInner createOrUpdate( String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters, Context context) { return createOrUpdateAsync(resourceGroupName, serverName, elasticPoolName, parameters, context).block(); }
@ServiceMethod(returns = ReturnType.SINGLE) ElasticPoolInner function( String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolInner parameters, Context context) { return createOrUpdateAsync(resourceGroupName, serverName, elasticPoolName, parameters, context).block(); }
/** * Creates or updates an elastic pool. * * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value * from the Azure Resource Manager API or the portal. * @param serverName The name of the server. * @param elasticPoolName The name of the elastic pool. * @param parameters An elastic pool. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an elastic pool. */
Creates or updates an elastic pool
createOrUpdate
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/implementation/ElasticPoolsClientImpl.java", "license": "mit", "size": 108421 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.util.Context", "com.azure.resourcemanager.sql.fluent.models.ElasticPoolInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; import com.azure.resourcemanager.sql.fluent.models.ElasticPoolInner;
import com.azure.core.annotation.*; import com.azure.core.util.*; import com.azure.resourcemanager.sql.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
330,994
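The imports_info and cluster_imports_info columns in the two rows above look like direct re-renderings of the imports list: the first joins the fully qualified names back into import statements, the second collapses them to one wildcard import per package. A small sketch of that derivation follows; the class and method names are invented for illustration, and the clustering behind libraries/libraries_info is not obvious from the sample, so it is not attempted here.

    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.stream.Collectors;

    // Illustrative sketch only: rebuilds imports_info and cluster_imports_info from the imports column.
    public class ImportsInfoSketch {

        // "a.b.C" -> "import a.b.C; import ..." as in the imports_info column.
        static String importsInfo(List<String> imports) {
            return imports.stream()
                    .map(i -> "import " + i + ";")
                    .collect(Collectors.joining(" "));
        }

        // "a.b.C" -> "import a.b.*;", de-duplicated per package, as in cluster_imports_info.
        static String clusterImportsInfo(List<String> imports) {
            return imports.stream()
                    .map(i -> i.substring(0, i.lastIndexOf('.')))   // drop the class name
                    .collect(Collectors.toCollection(LinkedHashSet::new))
                    .stream()
                    .map(pkg -> "import " + pkg + ".*;")
                    .collect(Collectors.joining(" "));
        }

        public static void main(String[] args) {
            List<String> imports = List.of(
                    "be.nabu.glue.annotations.GlueMethod", "be.nabu.glue.annotations.GlueParam",
                    "java.io.IOException", "java.io.InputStream", "java.text.ParseException");
            System.out.println(importsInfo(imports));
            System.out.println(clusterImportsInfo(imports));
        }
    }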
ScaleTwoDecimal getTotalCostLimit(Award award);
ScaleTwoDecimal getTotalCostLimit(Award award);
/** * * Get the total cost limit from the award. Returns the less of the obligated distributable amount or the total cost limit. */
Get the total cost limit from the award. Returns the less of the obligated distributable amount or the total cost limit
getTotalCostLimit
{ "repo_name": "mukadder/kc", "path": "coeus-impl/src/main/java/org/kuali/kra/award/budget/AwardBudgetService.java", "license": "agpl-3.0", "size": 5008 }
[ "org.kuali.coeus.sys.api.model.ScaleTwoDecimal", "org.kuali.kra.award.home.Award" ]
import org.kuali.coeus.sys.api.model.ScaleTwoDecimal; import org.kuali.kra.award.home.Award;
import org.kuali.coeus.sys.api.model.*; import org.kuali.kra.award.home.*;
[ "org.kuali.coeus", "org.kuali.kra" ]
org.kuali.coeus; org.kuali.kra;
2,913,109
public static DatasetGraph buildDataset(Item item) { return buildDataset(DatasetGraphFactory.createTxnMem(), item); }
static DatasetGraph function(Item item) { return buildDataset(DatasetGraphFactory.createTxnMem(), item); }
/** * Format: (dataset (graph ...)) (quad ...) (g s p o) (graph IRIa ...)) (graph * IRIb ...)) ) (graph ...) is an abbreviation for a dataset with a default graph * and no named graphs. */
Format: (dataset (graph ...)) (quad ...) (g s p o) (graph IRIa ...)) (graph IRIb ...)) ) (graph ...) is an abbreviation for a dataset with a default graph and no named graphs
buildDataset
{ "repo_name": "apache/jena", "path": "jena-arq/src/main/java/org/apache/jena/sparql/sse/builders/BuilderGraph.java", "license": "apache-2.0", "size": 7512 }
[ "org.apache.jena.sparql.core.DatasetGraph", "org.apache.jena.sparql.core.DatasetGraphFactory", "org.apache.jena.sparql.sse.Item" ]
import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphFactory; import org.apache.jena.sparql.sse.Item;
import org.apache.jena.sparql.core.*; import org.apache.jena.sparql.sse.*;
[ "org.apache.jena" ]
org.apache.jena;
202,907
public static boolean isTrait(final Class clazz) { return clazz!=null && clazz.getAnnotation(Trait.class)!=null; }
static boolean function(final Class clazz) { return clazz!=null && clazz.getAnnotation(Trait.class)!=null; }
/** * Returns true if the specified class is a trait. * @param clazz a class to test * @return true if the classnode represents a trait */
Returns true if the specified class is a trait
isTrait
{ "repo_name": "bsideup/incubator-groovy", "path": "src/main/org/codehaus/groovy/transform/trait/Traits.java", "license": "apache-2.0", "size": 16526 }
[ "groovy.transform.Trait" ]
import groovy.transform.Trait;
import groovy.transform.*;
[ "groovy.transform" ]
groovy.transform;
960,356
private Document addRootNeighbor(Document problem2) { ///@todo for this I will need to know the (or a) root of the problem return null; }
Document function(Document problem2) { return null; }
/** adds a neighbor to the root of the problem * @param problem2 original problem * @return modified problem */
adds a neighbor to the root of the problem
addRootNeighbor
{ "repo_name": "heniancheng/FRODO", "path": "src/frodo2/algorithms/dpop/restart/test/TestSDPOP.java", "license": "agpl-3.0", "size": 11609 }
[ "org.jdom2.Document" ]
import org.jdom2.Document;
import org.jdom2.*;
[ "org.jdom2" ]
org.jdom2;
928
protected void addServiceNamePropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ServiceControl_serviceName_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ServiceControl_serviceName_feature", "_UI_ServiceControl_type"), CorePackage.Literals.SERVICE_CONTROL__SERVICE_NAME, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), CorePackage.Literals.SERVICE_CONTROL__SERVICE_NAME, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
/** * This adds a property descriptor for the Service Name feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This adds a property descriptor for the Service Name feature.
addServiceNamePropertyDescriptor
{ "repo_name": "aciancone/klapersuite", "path": "klapersuite.metamodel.klaper.edit/src/klaper/core/provider/ServiceControlItemProvider.java", "license": "epl-1.0", "size": 9316 }
[ "org.eclipse.emf.edit.provider.ComposeableAdapterFactory", "org.eclipse.emf.edit.provider.ItemPropertyDescriptor" ]
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,780,309
public static boolean isLeaderOfCurrentGroup() { ExperimenterData exp = getUserDetails(); Set groups = getGroupsLeaderOf(); if (groups.size() == 0) return false; GroupData group = exp.getDefaultGroup(); Iterator i = groups.iterator(); GroupData g; while (i.hasNext()) { g = (GroupData) i.next(); if (g.getId() == group.getId()) return true; } return false; }
static boolean function() { ExperimenterData exp = getUserDetails(); Set groups = getGroupsLeaderOf(); if (groups.size() == 0) return false; GroupData group = exp.getDefaultGroup(); Iterator i = groups.iterator(); GroupData g; while (i.hasNext()) { g = (GroupData) i.next(); if (g.getId() == group.getId()) return true; } return false; }
/** * Returns <code>true</code> if the user currently logged in * is an owner of the current group, <code>false</code> otherwise. * * @return See above. */
Returns <code>true</code> if the user currently logged in is an owner of the current group, <code>false</code> otherwise
isLeaderOfCurrentGroup
{ "repo_name": "jballanc/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/dataBrowser/DataBrowserAgent.java", "license": "gpl-2.0", "size": 11482 }
[ "java.util.Iterator", "java.util.Set" ]
import java.util.Iterator; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
265,518
public ApplicationContext getReadOnlyApplicationContext() { this.lock.readLock().lock(); try { return ((ApplicationContextState) getState(false)).getApplicationContext(); } finally { this.lock.readLock().unlock(); } }
ApplicationContext function() { this.lock.readLock().lock(); try { return ((ApplicationContextState) getState(false)).getApplicationContext(); } finally { this.lock.readLock().unlock(); } }
/** * Gets the application context. Will not start a subsystem. * * @return the application context or null */
Gets the application context. Will not start a subsystem
getReadOnlyApplicationContext
{ "repo_name": "Kast0rTr0y/community-edition", "path": "projects/repository/source/java/org/alfresco/repo/management/subsystems/ChildApplicationContextFactory.java", "license": "lgpl-3.0", "size": 37252 }
[ "org.springframework.context.ApplicationContext" ]
import org.springframework.context.ApplicationContext;
import org.springframework.context.*;
[ "org.springframework.context" ]
org.springframework.context;
759,205
@Override void repair() throws Exception { index.removeAllDocuments(id); try { NodeState node = (NodeState) stateMgr.getItemState(id); log.info("Re-indexing node with wrong parent in index: " + getPath(node)); Document d = index.createDocument(node); index.addDocument(d); nodeIds.put(node.getNodeId(), Boolean.TRUE); } catch (NoSuchItemStateException e) { log.info("Not re-indexing node with wrong parent because node no longer exists"); } }
void repair() throws Exception { index.removeAllDocuments(id); try { NodeState node = (NodeState) stateMgr.getItemState(id); log.info(STR + getPath(node)); Document d = index.createDocument(node); index.addDocument(d); nodeIds.put(node.getNodeId(), Boolean.TRUE); } catch (NoSuchItemStateException e) { log.info(STR); } }
/** * Reindex node. */
Reindex node
repair
{ "repo_name": "Kast0rTr0y/jackrabbit", "path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ConsistencyCheck.java", "license": "apache-2.0", "size": 28745 }
[ "org.apache.jackrabbit.core.state.NoSuchItemStateException", "org.apache.jackrabbit.core.state.NodeState", "org.apache.lucene.document.Document" ]
import org.apache.jackrabbit.core.state.NoSuchItemStateException; import org.apache.jackrabbit.core.state.NodeState; import org.apache.lucene.document.Document;
import org.apache.jackrabbit.core.state.*; import org.apache.lucene.document.*;
[ "org.apache.jackrabbit", "org.apache.lucene" ]
org.apache.jackrabbit; org.apache.lucene;
233,714
@Override public Principal authenticate(String username, String credentials) { String serverCredentials = getPassword(username); boolean validated ; if ( serverCredentials == null ) { validated = false; } else if(hasMessageDigest()) { validated = serverCredentials.equalsIgnoreCase(digest(credentials)); } else { validated = serverCredentials.equals(credentials); } if(! validated ) { if (containerLog.isTraceEnabled()) { containerLog.trace(sm.getString("realmBase.authenticateFailure", username)); } return null; } if (containerLog.isTraceEnabled()) { containerLog.trace(sm.getString("realmBase.authenticateSuccess", username)); } return getPrincipal(username); }
Principal function(String username, String credentials) { String serverCredentials = getPassword(username); boolean validated ; if ( serverCredentials == null ) { validated = false; } else if(hasMessageDigest()) { validated = serverCredentials.equalsIgnoreCase(digest(credentials)); } else { validated = serverCredentials.equals(credentials); } if(! validated ) { if (containerLog.isTraceEnabled()) { containerLog.trace(sm.getString(STR, username)); } return null; } if (containerLog.isTraceEnabled()) { containerLog.trace(sm.getString(STR, username)); } return getPrincipal(username); }
/** * Return the Principal associated with the specified username and * credentials, if there is one; otherwise return <code>null</code>. * * @param username Username of the Principal to look up * @param credentials Password or other credentials to use in * authenticating this username */
Return the Principal associated with the specified username and credentials, if there is one; otherwise return <code>null</code>
authenticate
{ "repo_name": "pistolove/sourcecode4junit", "path": "Source4Tomcat/src/org/apache/catalina/realm/RealmBase.java", "license": "apache-2.0", "size": 48116 }
[ "java.security.Principal" ]
import java.security.Principal;
import java.security.*;
[ "java.security" ]
java.security;
2,430,682
public void updateProfileStateFromService(int pjsuaId) throws SameThreadException { if (!created) { return; } long accId = getAccountIdForPjsipId(service, pjsuaId); Log.d(THIS_FILE, "Update profile from service for " + pjsuaId + " aka in db " + accId); if (accId != SipProfile.INVALID_ID) { int success = pjsuaConstants.PJ_FALSE; pjsua_acc_info pjAccountInfo; pjAccountInfo = new pjsua_acc_info(); success = pjsua.acc_get_info(pjsuaId, pjAccountInfo); if (success == pjsuaConstants.PJ_SUCCESS && pjAccountInfo != null) { ContentValues cv = new ContentValues(); try { // Should be fine : status code are coherent with RFC // status codes cv.put(SipProfileState.STATUS_CODE, pjAccountInfo.getStatus().swigValue()); } catch (IllegalArgumentException e) { cv.put(SipProfileState.STATUS_CODE, SipCallSession.StatusCode.INTERNAL_SERVER_ERROR); } cv.put(SipProfileState.STATUS_TEXT, pjStrToString(pjAccountInfo.getStatus_text())); cv.put(SipProfileState.EXPIRES, pjAccountInfo.getExpires()); service.getContentResolver().update( ContentUris.withAppendedId(SipProfile.ACCOUNT_STATUS_ID_URI_BASE, accId), cv, null, null); Log.d(THIS_FILE, "Profile state UP : " + cv); } } else { Log.e(THIS_FILE, "Trying to update not added account " + pjsuaId); } }
void function(int pjsuaId) throws SameThreadException { if (!created) { return; } long accId = getAccountIdForPjsipId(service, pjsuaId); Log.d(THIS_FILE, STR + pjsuaId + STR + accId); if (accId != SipProfile.INVALID_ID) { int success = pjsuaConstants.PJ_FALSE; pjsua_acc_info pjAccountInfo; pjAccountInfo = new pjsua_acc_info(); success = pjsua.acc_get_info(pjsuaId, pjAccountInfo); if (success == pjsuaConstants.PJ_SUCCESS && pjAccountInfo != null) { ContentValues cv = new ContentValues(); try { cv.put(SipProfileState.STATUS_CODE, pjAccountInfo.getStatus().swigValue()); } catch (IllegalArgumentException e) { cv.put(SipProfileState.STATUS_CODE, SipCallSession.StatusCode.INTERNAL_SERVER_ERROR); } cv.put(SipProfileState.STATUS_TEXT, pjStrToString(pjAccountInfo.getStatus_text())); cv.put(SipProfileState.EXPIRES, pjAccountInfo.getExpires()); service.getContentResolver().update( ContentUris.withAppendedId(SipProfile.ACCOUNT_STATUS_ID_URI_BASE, accId), cv, null, null); Log.d(THIS_FILE, STR + cv); } } else { Log.e(THIS_FILE, STR + pjsuaId); } }
/** * Synchronize content provider backend from pjsip stack * * @param pjsuaId the pjsua id of the account to synchronize * @throws SameThreadException */
Synchronize content provider backend from pjsip stack
updateProfileStateFromService
{ "repo_name": "tmxdyf/CSipSimple-Import-", "path": "src/com/csipsimple/pjsip/PjSipService.java", "license": "lgpl-3.0", "size": 97586 }
[ "android.content.ContentUris", "android.content.ContentValues", "com.csipsimple.api.SipCallSession", "com.csipsimple.api.SipProfile", "com.csipsimple.api.SipProfileState", "com.csipsimple.service.SipService", "com.csipsimple.utils.Log" ]
import android.content.ContentUris; import android.content.ContentValues; import com.csipsimple.api.SipCallSession; import com.csipsimple.api.SipProfile; import com.csipsimple.api.SipProfileState; import com.csipsimple.service.SipService; import com.csipsimple.utils.Log;
import android.content.*; import com.csipsimple.api.*; import com.csipsimple.service.*; import com.csipsimple.utils.*;
[ "android.content", "com.csipsimple.api", "com.csipsimple.service", "com.csipsimple.utils" ]
android.content; com.csipsimple.api; com.csipsimple.service; com.csipsimple.utils;
2,456,614
public static void main(String[] args) { if (args.length == 0) { System.out.println("You must specify a program file name."); System.out.println("Usage: java BF program.bf"); return; } File programFile = new File(args[0]); try { Scanner scanner = new Scanner(programFile); StringBuilder sb = new StringBuilder(); while (scanner.hasNextLine()) { String line = scanner.nextLine(); sb.append(line); } scanner.close(); BF bf = new BF(sb.toString().toCharArray()); bf.execute(); } catch (FileNotFoundException fnfe) { System.out.println("The program file specified could not be found."); System.out.println("Usage: java BF program.bf"); } catch(IOException ioe) { System.out.println("There was a problem reading or writing to the I/O stream."); } }
static void function(String[] args) { if (args.length == 0) { System.out.println(STR); System.out.println(STR); return; } File programFile = new File(args[0]); try { Scanner scanner = new Scanner(programFile); StringBuilder sb = new StringBuilder(); while (scanner.hasNextLine()) { String line = scanner.nextLine(); sb.append(line); } scanner.close(); BF bf = new BF(sb.toString().toCharArray()); bf.execute(); } catch (FileNotFoundException fnfe) { System.out.println(STR); System.out.println(STR); } catch(IOException ioe) { System.out.println(STR); } }
/** * A command-line brainfuck interpreter. * Takes a .bf program file name as the only argument. * @param args */
A command-line brainfuck interpreter. Takes a .bf program file name as the only argument
main
{ "repo_name": "BillCruise/BF4J", "path": "src/bf4j/BF.java", "license": "mit", "size": 3676 }
[ "java.io.File", "java.io.FileNotFoundException", "java.io.IOException", "java.util.Scanner" ]
import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Scanner;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,782,554
public Builder addNames(String names) { com.ibm.cloud.sdk.core.util.Validator.notNull(names, "names cannot be null"); if (this.names == null) { this.names = new ArrayList<String>(); } this.names.add(names); return this; }
Builder function(String names) { com.ibm.cloud.sdk.core.util.Validator.notNull(names, STR); if (this.names == null) { this.names = new ArrayList<String>(); } this.names.add(names); return this; }
/** * Adds an names to names. * * @param names the new names * @return the WordStyle builder */
Adds an names to names
addNames
{ "repo_name": "watson-developer-cloud/java-sdk", "path": "discovery/src/main/java/com/ibm/watson/discovery/v1/model/WordStyle.java", "license": "apache-2.0", "size": 2814 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
1,383
@Override() public java.lang.Class<?> getJavaClass( ) { return org.opennms.netmgt.config.datacollection.ResourceType.class; }
@Override() java.lang.Class<?> function( ) { return org.opennms.netmgt.config.datacollection.ResourceType.class; }
/** * Method getJavaClass. * * @return the Java class represented by this descriptor. */
Method getJavaClass
getJavaClass
{ "repo_name": "rfdrake/opennms", "path": "opennms-config-model/src/main/java/org/opennms/netmgt/config/datacollection/descriptors/ResourceTypeDescriptor.java", "license": "gpl-2.0", "size": 14818 }
[ "org.opennms.netmgt.config.datacollection.ResourceType" ]
import org.opennms.netmgt.config.datacollection.ResourceType;
import org.opennms.netmgt.config.datacollection.*;
[ "org.opennms.netmgt" ]
org.opennms.netmgt;
1,480,226
while(true) { Random random = new Random(); for(int i = 0; i < 10; i++) { int queSensor = random.nextInt(sensores.size()); if(random.nextInt(101) > 49) // 51% probabilidad de ocupar, 49% de liberar ocuparCafeteria(sensores.get(queSensor)); else liberarCafeteria(sensores.get(queSensor)); } long waitInMillis = random.nextInt(15000); try { Thread.sleep(5000+waitInMillis); } catch (InterruptedException e) { System.err.println("Error durante la simulación: " + e.getMessage()); } } } private static void ocuparCafeteria(Sensor sensor) { sensor.entrar(); }
while(true) { Random random = new Random(); for(int i = 0; i < 10; i++) { int queSensor = random.nextInt(sensores.size()); if(random.nextInt(101) > 49) ocuparCafeteria(sensores.get(queSensor)); else liberarCafeteria(sensores.get(queSensor)); } long waitInMillis = random.nextInt(15000); try { Thread.sleep(5000+waitInMillis); } catch (InterruptedException e) { System.err.println(STR + e.getMessage()); } } } private static void ocuparCafeteria(Sensor sensor) { sensor.entrar(); }
/** * Indefinite loop that every 5 to 20 seconds fills or empties 10 slots * in different cafeterias. It is assumed that each cafeteria has its own sensor. */
Indefinite loop that every 5 to 20 seconds fills or empties 10 slots in different cafeterias. It is assumed that each cafeteria has its own sensor
run
{ "repo_name": "UNIZAR-30249-2016-Opgods/Server", "path": "src/main/java/rest/aplicacion/simulacion/SimularCafeterias.java", "license": "apache-2.0", "size": 1859 }
[ "java.util.Random", "rest.infraestructura.Sensor" ]
import java.util.Random; import rest.infraestructura.Sensor;
import java.util.*; import rest.infraestructura.*;
[ "java.util", "rest.infraestructura" ]
java.util; rest.infraestructura;
1,976,397
protected boolean destroyService(final ServiceItem serviceItem) throws Exception { if (serviceItem == null) throw new IllegalArgumentException(); final Object admin; { final Remote proxy = (Remote) serviceItem.service; final Method getAdmin = proxy.getClass().getMethod("getAdmin", new Class[] {}); admin = getAdmin.invoke(proxy, new Object[] {}); } if (admin instanceof DestroyAdmin) { // Destroy the service and its persistent state. log.warn("will destroy() service: " + serviceItem); ((DestroyAdmin) admin).destroy(); return true; } log.warn("Service does not implement " + DestroyAdmin.class + " : " + serviceItem); return false; }
boolean function(final ServiceItem serviceItem) throws Exception { if (serviceItem == null) throw new IllegalArgumentException(); final Object admin; { final Remote proxy = (Remote) serviceItem.service; final Method getAdmin = proxy.getClass().getMethod(STR, new Class[] {}); admin = getAdmin.invoke(proxy, new Object[] {}); } if (admin instanceof DestroyAdmin) { log.warn(STR + serviceItem); ((DestroyAdmin) admin).destroy(); return true; } log.warn(STR + DestroyAdmin.class + STR + serviceItem); return false; }
/** * Sends {@link RemoteDestroyAdmin#destroy()} request to the service. Note * that the service may process the request asynchronously. * * @param serviceItem * The service item. * * @return <code>true</code> if we were able to send that message to the * service. * * @throws Exception * if anything goes wrong. */
Sends <code>RemoteDestroyAdmin#destroy()</code> request to the service. Note that the service may process the request asynchronously
destroyService
{ "repo_name": "rac021/blazegraph_1_5_3_cluster_2_nodes", "path": "bigdata-jini/src/main/java/com/bigdata/service/jini/lookup/AbstractCachingServiceClient.java", "license": "gpl-2.0", "size": 28981 }
[ "com.sun.jini.admin.DestroyAdmin", "java.lang.reflect.Method", "java.rmi.Remote", "net.jini.core.lookup.ServiceItem" ]
import com.sun.jini.admin.DestroyAdmin; import java.lang.reflect.Method; import java.rmi.Remote; import net.jini.core.lookup.ServiceItem;
import com.sun.jini.admin.*; import java.lang.reflect.*; import java.rmi.*; import net.jini.core.lookup.*;
[ "com.sun.jini", "java.lang", "java.rmi", "net.jini.core" ]
com.sun.jini; java.lang; java.rmi; net.jini.core;
2,137,148
public void updateStartersWithStateList(List<IState> list){ for(IState state : list){ if(state.getStateType().equals(DISPLAY_STATE)){ updateStarterDisplay(new DisplayState((DisplayState) state)); } if(state.getStateType().equals(TURTLE_STATE)){ TurtleState turtState = (TurtleState) state; updateStarterState(turtState.getId(), turtState); } } }
void function(List<IState> list){ for(IState state : list){ if(state.getStateType().equals(DISPLAY_STATE)){ updateStarterDisplay(new DisplayState((DisplayState) state)); } if(state.getStateType().equals(TURTLE_STATE)){ TurtleState turtState = (TurtleState) state; updateStarterState(turtState.getId(), turtState); } } }
/** * Update Starter States with a state list. * * @param list the List of Starter States */
Update Starter States with a state list
updateStartersWithStateList
{ "repo_name": "as577/slogo", "path": "src/slogo/model/ActiveMemory.java", "license": "mit", "size": 4395 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,670,431
public Date getStopDate() { if (describeOutput.stoppedRunning == null) { return null; } return new Date(describeOutput.stoppedRunning); }
Date function() { if (describeOutput.stoppedRunning == null) { return null; } return new Date(describeOutput.stoppedRunning); }
/** * Returns the date that the job stopped running, or null if the job has not stopped running * yet. * * @return stop date or null */
Returns the date that the job stopped running, or null if the job has not stopped running yet
getStopDate
{ "repo_name": "johnwallace123/dx-toolkit", "path": "src/java/src/main/java/com/dnanexus/DXJob.java", "license": "apache-2.0", "size": 15446 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
2,558,557
public List<RelayServiceConnectionEntityInner> hybridConnections() { return this.hybridConnections; }
List<RelayServiceConnectionEntityInner> function() { return this.hybridConnections; }
/** * Get the Hybrid Connections summary view. * * @return the hybridConnections value */
Get the Hybrid Connections summary view
hybridConnections
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/appservice/mgmt-v2018_02_01/src/main/java/com/microsoft/azure/management/appservice/v2018_02_01/implementation/NetworkFeaturesInner.java", "license": "mit", "size": 2456 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
474,890
public void save() { if (Telemetry.USAGESTATS.isDenied()) { LogService.getRoot().config( "com.rapidminer.gui.telemetry.accessing_online_services_disallowed"); return; } if (!RapidMiner.getExecutionMode().canAccessFilesystem()) { LogService.getRoot().config( "com.rapidminer.gui.tools.usagestats.UsageStatistics.accessing_file_system_error_bypassing_save"); return; } File file = FileSystemService.getUserConfigFile("usagestats.xml"); try { LogService.getRoot().log(Level.CONFIG, "com.rapidminer.gui.tools.usagestats.UsageStatistics.saving_operator_usage"); XMLTools.stream(getXML(ActionStatisticsCollector.getInstance().getActionStatisticsSnapshot(false)), file, StandardCharsets.UTF_8); } catch (Exception e) { LogService.getRoot().log(Level.WARNING, I18N.getMessage(LogService.getRoot().getResourceBundle(), "com.rapidminer.gui.tools.usagestats.UsageStatistics.saving_operator_usage_error", e), e); } }
void function() { if (Telemetry.USAGESTATS.isDenied()) { LogService.getRoot().config( STR); return; } if (!RapidMiner.getExecutionMode().canAccessFilesystem()) { LogService.getRoot().config( STR); return; } File file = FileSystemService.getUserConfigFile(STR); try { LogService.getRoot().log(Level.CONFIG, STR); XMLTools.stream(getXML(ActionStatisticsCollector.getInstance().getActionStatisticsSnapshot(false)), file, StandardCharsets.UTF_8); } catch (Exception e) { LogService.getRoot().log(Level.WARNING, I18N.getMessage(LogService.getRoot().getResourceBundle(), STR, e), e); } }
/** * Saves the statistics to a user file. */
Saves the statistics to a user file
save
{ "repo_name": "rapidminer/rapidminer-studio", "path": "src/main/java/com/rapidminer/tools/usagestats/UsageStatistics.java", "license": "agpl-3.0", "size": 11222 }
[ "com.rapidminer.RapidMiner", "com.rapidminer.io.process.XMLTools", "com.rapidminer.settings.Telemetry", "com.rapidminer.tools.FileSystemService", "com.rapidminer.tools.LogService", "java.io.File", "java.nio.charset.StandardCharsets", "java.util.logging.Level" ]
import com.rapidminer.RapidMiner; import com.rapidminer.io.process.XMLTools; import com.rapidminer.settings.Telemetry; import com.rapidminer.tools.FileSystemService; import com.rapidminer.tools.LogService; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.logging.Level;
import com.rapidminer.*; import com.rapidminer.io.process.*; import com.rapidminer.settings.*; import com.rapidminer.tools.*; import java.io.*; import java.nio.charset.*; import java.util.logging.*;
[ "com.rapidminer", "com.rapidminer.io", "com.rapidminer.settings", "com.rapidminer.tools", "java.io", "java.nio", "java.util" ]
com.rapidminer; com.rapidminer.io; com.rapidminer.settings; com.rapidminer.tools; java.io; java.nio; java.util;
2,235,744
public static Element getRespShortVideo(WxRespShortVideoEntity respShortVideo) throws DocumentException { Element ele = respEntityFactory(respShortVideo); Element videoEle = ele.addElement("Video"); videoEle.addElement("MediaId").addCDATA(respShortVideo.getVideo().getMediaId()); videoEle.addElement("Title").addCDATA(StringUtils.defaultString(respShortVideo.getVideo().getTitle())); videoEle.addElement("Description").addCDATA(StringUtils.defaultString(respShortVideo.getVideo().getDescription())); return ele; }
static Element function(WxRespShortVideoEntity respShortVideo) throws DocumentException { Element ele = respEntityFactory(respShortVideo); Element videoEle = ele.addElement("Video"); videoEle.addElement(STR).addCDATA(respShortVideo.getVideo().getMediaId()); videoEle.addElement("Title").addCDATA(StringUtils.defaultString(respShortVideo.getVideo().getTitle())); videoEle.addElement(STR).addCDATA(StringUtils.defaultString(respShortVideo.getVideo().getDescription())); return ele; }
/** * <code> * &lt;xml&gt;<br /> * &nbsp;&nbsp;&lt;ToUserName&gt;&lt;![CDATA[toUser]]&gt;&lt;/ToUserName&gt;<br /> * &nbsp;&nbsp;&lt;FromUserName&gt;&lt;![CDATA[fromUser]]&gt;&lt;/FromUserName&gt;<br /> * &nbsp;&nbsp;&lt;CreateTime&gt;12345678&lt;/CreateTime&gt;<br /> * &nbsp;&nbsp;&lt;MsgType&gt;&lt;![CDATA[shortvideo]]&gt;&lt;/MsgType&gt;<br /> * &nbsp;&nbsp;&lt;ShortVideo&gt;<br /> * &nbsp;&nbsp;&nbsp;&nbsp;&lt;MediaId&gt;&lt;![CDATA[media_id]]&gt;&lt;/MediaId&gt;<br /> * &nbsp;&nbsp;&nbsp;&nbsp;&lt;Title&gt;&lt;![CDATA[title]]&gt;&lt;/Title&gt;<br /> * &nbsp;&nbsp;&nbsp;&nbsp;&lt;Description&gt;&lt;![CDATA[description]]&gt;&lt;/Description&gt;<br /> * &nbsp;&nbsp;&lt;/ShortVideo&gt;<br /> * &lt;/xml&gt;<br /> * </code> * * @param respShortVideo * @return * @throws DocumentException */
<code> &lt;xml&gt; &nbsp;&nbsp;&lt;ToUserName&gt;&lt;![CDATA[toUser]]&gt;&lt;/ToUserName&gt; &nbsp;&nbsp;&lt;FromUserName&gt;&lt;![CDATA[fromUser]]&gt;&lt;/FromUserName&gt; &nbsp;&nbsp;&lt;CreateTime&gt;12345678&lt;/CreateTime&gt; &nbsp;&nbsp;&lt;MsgType&gt;&lt;![CDATA[shortvideo]]&gt;&lt;/MsgType&gt; &nbsp;&nbsp;&lt;ShortVideo&gt; &nbsp;&nbsp;&nbsp;&nbsp;&lt;MediaId&gt;&lt;![CDATA[media_id]]&gt;&lt;/MediaId&gt; &nbsp;&nbsp;&nbsp;&nbsp;&lt;Title&gt;&lt;![CDATA[title]]&gt;&lt;/Title&gt; &nbsp;&nbsp;&nbsp;&nbsp;&lt;Description&gt;&lt;![CDATA[description]]&gt;&lt;/Description&gt; &nbsp;&nbsp;&lt;/ShortVideo&gt; &lt;/xml&gt; </code>
getRespShortVideo
{ "repo_name": "onepip/weixin-mp-java", "path": "src/main/java/org/hamster/weixinmp/controller/util/WxXmlUtil.java", "license": "apache-2.0", "size": 24785 }
[ "org.apache.commons.lang3.StringUtils", "org.dom4j.DocumentException", "org.dom4j.Element", "org.hamster.weixinmp.dao.entity.resp.WxRespShortVideoEntity" ]
import org.apache.commons.lang3.StringUtils; import org.dom4j.DocumentException; import org.dom4j.Element; import org.hamster.weixinmp.dao.entity.resp.WxRespShortVideoEntity;
import org.apache.commons.lang3.*; import org.dom4j.*; import org.hamster.weixinmp.dao.entity.resp.*;
[ "org.apache.commons", "org.dom4j", "org.hamster.weixinmp" ]
org.apache.commons; org.dom4j; org.hamster.weixinmp;
774,602
@IgniteSpiConfiguration(optional = true) public TcpDiscoverySpi setForceServerMode(boolean forceSrvMode) { this.forceSrvMode = forceSrvMode; return this; }
@IgniteSpiConfiguration(optional = true) TcpDiscoverySpi function(boolean forceSrvMode) { this.forceSrvMode = forceSrvMode; return this; }
/** * Sets force server mode flag. * <p> * If {@code true} TcpDiscoverySpi is started in server mode regardless * of {@link IgniteConfiguration#isClientMode()}. * * @param forceSrvMode forceServerMode flag. * @return {@code this} for chaining. */
Sets force server mode flag. If true TcpDiscoverySpi is started in server mode regardless of <code>IgniteConfiguration#isClientMode()</code>
setForceServerMode
{ "repo_name": "vsisko/incubator-ignite", "path": "modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/TcpDiscoverySpi.java", "license": "apache-2.0", "size": 68315 }
[ "org.apache.ignite.spi.IgniteSpiConfiguration" ]
import org.apache.ignite.spi.IgniteSpiConfiguration;
import org.apache.ignite.spi.*;
[ "org.apache.ignite" ]
org.apache.ignite;
583,422
private String[] getWeatherDataFromJson(String forecastJsonStr, int numDays) throws JSONException { // These are the names of the JSON objects that need to be extracted. final String OWM_LIST = "list"; final String OWM_WEATHER = "weather"; final String OWM_TEMPERATURE = "temp"; final String OWM_MAX = "max"; final String OWM_MIN = "min"; final String OWM_DESCRIPTION = "main"; JSONObject forecastJson = new JSONObject(forecastJsonStr); JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST); // OWM returns daily forecasts based upon the local time of the city that is being // asked for, which means that we need to know the GMT offset to translate this data // properly. // Since this data is also sent in-order and the first day is always the // current day, we're going to take advantage of that to get a nice // normalized UTC date for all of our weather. Time dayTime = new Time(); dayTime.setToNow(); // we start at the day returned by local time. Otherwise this is a mess. int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff); // now we work exclusively in UTC dayTime = new Time(); String[] resultStrs = new String[numDays]; for(int i = 0; i < weatherArray.length(); i++) { // For now, using the format "Day, description, hi/low" String day; String description; String highAndLow; // Get the JSON object representing the day JSONObject dayForecast = weatherArray.getJSONObject(i); // The date/time is returned as a long. We need to convert that // into something human-readable, since most people won't read "1400356800" as // "this saturday". long dateTime; // Cheating to convert this to UTC time, which is what we want anyhow dateTime = dayTime.setJulianDay(julianStartDay+i); day = getReadableDateString(dateTime); // description is in a child array called "weather", which is 1 element long. JSONObject weatherObject = dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0); description = weatherObject.getString(OWM_DESCRIPTION); // Temperatures are in a child object called "temp". Try not to name variables // "temp" when working with temperature. It confuses everybody. JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE); double high = temperatureObject.getDouble(OWM_MAX); double low = temperatureObject.getDouble(OWM_MIN); // get prefs here // convert hi/lo based on preferred units // add cap of first letter of unit (C/F), maybe a degree symbol? highAndLow = formatHighLows(high, low); resultStrs[i] = day + " - " + description + " - " + highAndLow; } // for (String s : resultStrs) { // Log.v("tag", "Forecast entry: " + s); // } return resultStrs; }
String[] function(String forecastJsonStr, int numDays) throws JSONException { final String OWM_LIST = "list"; final String OWM_WEATHER = STR; final String OWM_TEMPERATURE = "temp"; final String OWM_MAX = "max"; final String OWM_MIN = "min"; final String OWM_DESCRIPTION = "main"; JSONObject forecastJson = new JSONObject(forecastJsonStr); JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST); Time dayTime = new Time(); dayTime.setToNow(); int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff); dayTime = new Time(); String[] resultStrs = new String[numDays]; for(int i = 0; i < weatherArray.length(); i++) { String day; String description; String highAndLow; JSONObject dayForecast = weatherArray.getJSONObject(i); long dateTime; dateTime = dayTime.setJulianDay(julianStartDay+i); day = getReadableDateString(dateTime); JSONObject weatherObject = dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0); description = weatherObject.getString(OWM_DESCRIPTION); JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE); double high = temperatureObject.getDouble(OWM_MAX); double low = temperatureObject.getDouble(OWM_MIN); highAndLow = formatHighLows(high, low); resultStrs[i] = day + STR + description + STR + highAndLow; } return resultStrs; }
/** * Take the String representing the complete forecast in JSON Format and * pull out the data we need to construct the Strings needed for the wireframes. * * Fortunately parsing is easy: constructor takes the JSON string and converts it * into an Object hierarchy for us. */
Take the String representing the complete forecast in JSON Format and pull out the data we need to construct the Strings needed for the wireframes. Fortunately parsing is easy: constructor takes the JSON string and converts it into an Object hierarchy for us
getWeatherDataFromJson
{ "repo_name": "iandouglas/udacity_nanodegree_sunshine", "path": "app/src/main/java/com/example/android/sunshine/app/ForecastFragment.java", "license": "apache-2.0", "size": 13298 }
[ "android.text.format.Time", "org.json.JSONArray", "org.json.JSONException", "org.json.JSONObject" ]
import android.text.format.Time; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject;
import android.text.format.*; import org.json.*;
[ "android.text", "org.json" ]
android.text; org.json;
2,236,469
public static void doAsPrivilege(final String methodName, final Filter targetObject, final Class[] targetType, final Object[] targetArguments) throws java.lang.Exception{ Method method = null; Method[] methodsCache = null; if(objectCache.containsKey(targetObject)){ methodsCache = (Method[])objectCache.get(targetObject); method = findMethod(methodsCache, methodName); if (method == null){ method = createMethodAndCacheIt(methodsCache, methodName, targetObject, targetType); } } else { method = createMethodAndCacheIt(methodsCache, methodName, targetObject, targetType); } execute(method, targetObject, targetArguments, null); }
static void function(final String methodName, final Filter targetObject, final Class[] targetType, final Object[] targetArguments) throws java.lang.Exception{ Method method = null; Method[] methodsCache = null; if(objectCache.containsKey(targetObject)){ methodsCache = (Method[])objectCache.get(targetObject); method = findMethod(methodsCache, methodName); if (method == null){ method = createMethodAndCacheIt(methodsCache, methodName, targetObject, targetType); } } else { method = createMethodAndCacheIt(methodsCache, methodName, targetObject, targetType); } execute(method, targetObject, targetArguments, null); }
/** * Perform work as a particular </code>Subject</code>. Here the work * will be granted to a <code>null</code> subject. * * @param methodName the method to apply the security restriction * @param targetObject the <code>Filter</code> on which the method will * be called. * @param targetType <code>Class</code> array used to instanciate a * <code>Method</code> object. * @param targetArguments <code>Object</code> array contains the * runtime parameters instance. */
Perform work as a particular </code>Subject</code>. Here the work will be granted to a <code>null</code> subject
doAsPrivilege
{ "repo_name": "plumer/codana", "path": "tomcat_files/6.0.0/SecurityUtil.java", "license": "mit", "size": 14955 }
[ "java.lang.reflect.Method", "javax.servlet.Filter" ]
import java.lang.reflect.Method; import javax.servlet.Filter;
import java.lang.reflect.*; import javax.servlet.*;
[ "java.lang", "javax.servlet" ]
java.lang; javax.servlet;
1,362,711
public void validateParentIonMass(FacesContext arg0, UIComponent arg1, Object arg2) throws ValidatorException { InterfaceValidators.validateParentIonMass(arg0, arg1, arg2); }
void function(FacesContext arg0, UIComponent arg1, Object arg2) throws ValidatorException { InterfaceValidators.validateParentIonMass(arg0, arg1, arg2); }
/** * Validates the input Tolerance to be a float between 0 and 10000 * * @param arg0 FacesContext of the form * @param arg1 Component of the form * @param arg2 Input of the user in the component arg1 * */
Validates the input Tolerance to be a float between 0 and 10000
validateParentIonMass
{ "repo_name": "albertogilf/ceuMassMediator", "path": "ceu-mass-mediator-v4.0/src/java/presentation/OxidationController.java", "license": "gpl-3.0", "size": 22045 }
[ "javax.faces.component.UIComponent", "javax.faces.context.FacesContext", "javax.faces.validator.ValidatorException" ]
import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.faces.validator.ValidatorException;
import javax.faces.component.*; import javax.faces.context.*; import javax.faces.validator.*;
[ "javax.faces" ]
javax.faces;
1,317,941
private static FakedTrackingVariable analyseCloseableExpression(FlowInfo flowInfo, LocalVariableBinding local, ASTNode location, Expression expression, FakedTrackingVariable previousTracker) { // unwrap uninteresting nodes: while (true) { if (expression instanceof Assignment) expression = ((Assignment)expression).expression; else if (expression instanceof CastExpression) expression = ((CastExpression) expression).expression; else break; } // analyze by node type: if (expression instanceof AllocationExpression) { // allocation expressions already have their tracking variables analyzed by analyseCloseableAllocation(..) FakedTrackingVariable tracker = ((AllocationExpression) expression).closeTracker; if (tracker != null && tracker.originalBinding == null) { // tracker without original binding (unassigned closeable) shouldn't reach here but let's play safe return null; } return tracker; } else if (expression instanceof MessageSend || expression instanceof ArrayReference) { // we *might* be responsible for the resource obtained FakedTrackingVariable tracker = new FakedTrackingVariable(local, location); tracker.globalClosingState |= SHARED_WITH_OUTSIDE; flowInfo.markPotentiallyNullBit(tracker.binding); // shed some doubt return tracker; } else if ( (expression.bits & RestrictiveFlagMASK) == Binding.FIELD ||((expression instanceof QualifiedNameReference) && ((QualifiedNameReference) expression).isFieldAccess())) { // responsibility for this resource probably lies at a higher level FakedTrackingVariable tracker = new FakedTrackingVariable(local, location); tracker.globalClosingState |= OWNED_BY_OUTSIDE; // leave state as UNKNOWN, the bit OWNED_BY_OUTSIDE will prevent spurious warnings return tracker; } if (expression.resolvedType instanceof ReferenceBinding) { ReferenceBinding resourceType = (ReferenceBinding) expression.resolvedType; if (resourceType.hasTypeBit(TypeIds.BitResourceFreeCloseable)) { // (a) resource-free closeable: -> null return null; } } if (local.closeTracker != null) // (c): inner has already been analyzed: -> re-use track var return local.closeTracker; FakedTrackingVariable newTracker = new FakedTrackingVariable(local, location); LocalVariableBinding rhsLocal = expression.localVariableBinding(); if (rhsLocal != null && rhsLocal.isParameter()) { newTracker.globalClosingState |= OWNED_BY_OUTSIDE; } return newTracker; }
static FakedTrackingVariable function(FlowInfo flowInfo, LocalVariableBinding local, ASTNode location, Expression expression, FakedTrackingVariable previousTracker) { while (true) { if (expression instanceof Assignment) expression = ((Assignment)expression).expression; else if (expression instanceof CastExpression) expression = ((CastExpression) expression).expression; else break; } if (expression instanceof AllocationExpression) { FakedTrackingVariable tracker = ((AllocationExpression) expression).closeTracker; if (tracker != null && tracker.originalBinding == null) { return null; } return tracker; } else if (expression instanceof MessageSend expression instanceof ArrayReference) { FakedTrackingVariable tracker = new FakedTrackingVariable(local, location); tracker.globalClosingState = SHARED_WITH_OUTSIDE; flowInfo.markPotentiallyNullBit(tracker.binding); return tracker; } else if ( (expression.bits & RestrictiveFlagMASK) == Binding.FIELD ((expression instanceof QualifiedNameReference) && ((QualifiedNameReference) expression).isFieldAccess())) { FakedTrackingVariable tracker = new FakedTrackingVariable(local, location); tracker.globalClosingState = OWNED_BY_OUTSIDE; return tracker; } if (expression.resolvedType instanceof ReferenceBinding) { ReferenceBinding resourceType = (ReferenceBinding) expression.resolvedType; if (resourceType.hasTypeBit(TypeIds.BitResourceFreeCloseable)) { return null; } } if (local.closeTracker != null) return local.closeTracker; FakedTrackingVariable newTracker = new FakedTrackingVariable(local, location); LocalVariableBinding rhsLocal = expression.localVariableBinding(); if (rhsLocal != null && rhsLocal.isParameter()) { newTracker.globalClosingState = OWNED_BY_OUTSIDE; } return newTracker; }
/** * Analyze structure of a closeable expression, matching (chained) resources against our white lists. * @param flowInfo where to record close status * @param local local variable to which the closeable is being assigned * @param location where to flag errors/warnings against * @param expression expression to be analyzed * @param previousTracker when analyzing a re-assignment we may already have a tracking variable for local, * which we should then re-use * @return a tracking variable associated with local or null if no need to track */
Analyze structure of a closeable expression, matching (chained) resources against our white lists
analyseCloseableExpression
{ "repo_name": "trylimits/Eclipse-Postfix-Code-Completion-Juno38", "path": "juno38/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/ast/FakedTrackingVariable.java", "license": "epl-1.0", "size": 36034 }
[ "org.eclipse.jdt.internal.compiler.flow.FlowInfo", "org.eclipse.jdt.internal.compiler.lookup.Binding", "org.eclipse.jdt.internal.compiler.lookup.LocalVariableBinding", "org.eclipse.jdt.internal.compiler.lookup.ReferenceBinding", "org.eclipse.jdt.internal.compiler.lookup.TypeIds" ]
import org.eclipse.jdt.internal.compiler.flow.FlowInfo; import org.eclipse.jdt.internal.compiler.lookup.Binding; import org.eclipse.jdt.internal.compiler.lookup.LocalVariableBinding; import org.eclipse.jdt.internal.compiler.lookup.ReferenceBinding; import org.eclipse.jdt.internal.compiler.lookup.TypeIds;
import org.eclipse.jdt.internal.compiler.flow.*; import org.eclipse.jdt.internal.compiler.lookup.*;
[ "org.eclipse.jdt" ]
org.eclipse.jdt;
2,728,943
public void setStrokeParts(float w, int cap, int join, float limit, float[] ary, float phase) { if (w == NOWIDTH) { w = this.state.stroke.getLineWidth(); } if (cap == NOCAP) { cap = this.state.stroke.getEndCap(); } if (join == NOJOIN) { join = this.state.stroke.getLineJoin(); } if (limit == NOLIMIT) { limit = this.state.stroke.getMiterLimit(); } if (phase == NOPHASE) { ary = this.state.stroke.getDashArray(); phase = this.state.stroke.getDashPhase(); } if (ary != null && ary.length == 0) { ary = null; } if (phase == NOPHASE) { this.state.stroke = new BasicStroke(w, cap, join, limit); } else { this.state.stroke = new BasicStroke(w, cap, join, limit, ary, phase); } }
void function(float w, int cap, int join, float limit, float[] ary, float phase) { if (w == NOWIDTH) { w = this.state.stroke.getLineWidth(); } if (cap == NOCAP) { cap = this.state.stroke.getEndCap(); } if (join == NOJOIN) { join = this.state.stroke.getLineJoin(); } if (limit == NOLIMIT) { limit = this.state.stroke.getMiterLimit(); } if (phase == NOPHASE) { ary = this.state.stroke.getDashArray(); phase = this.state.stroke.getDashPhase(); } if (ary != null && ary.length == 0) { ary = null; } if (phase == NOPHASE) { this.state.stroke = new BasicStroke(w, cap, join, limit); } else { this.state.stroke = new BasicStroke(w, cap, join, limit, ary, phase); } }
/** * Set some or all aspects of the current stroke. * @param w the width of the stroke, or NOWIDTH to leave it unchanged * @param cap the end cap style, or NOCAP to leave it unchanged * @param join the join style, or NOJOIN to leave it unchanged * @param limit the miter limit, or NOLIMIT to leave it unchanged * @param phase the phase of the dash array, or NOPHASE to leave it * unchanged * @param ary the dash array, or null to leave it unchanged. phase * and ary must both be valid, or phase must be NOPHASE while ary is null. */
Set some or all aspects of the current stroke
setStrokeParts
{ "repo_name": "denisfalqueto/PDFrenderer", "path": "src/main/java/com/sun/pdfview/PDFRenderer.java", "license": "lgpl-2.1", "size": 34029 }
[ "java.awt.BasicStroke" ]
import java.awt.BasicStroke;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,228,938
public void put(ByteBuffer src) { try { file.flush(); tmpChannel.write(src); } catch (IOException e) { throw new MapFailedException("could not write buffer to mdr tmp file"); } }
void function(ByteBuffer src) { try { file.flush(); tmpChannel.write(src); } catch (IOException e) { throw new MapFailedException(STR); } }
/** * Write out a complete byte buffer. * * @param src The buffer to write. */
Write out a complete byte buffer
put
{ "repo_name": "balp/mkgmap", "path": "src/uk/me/parabola/imgfmt/app/FileBackedImgFileWriter.java", "license": "gpl-2.0", "size": 5545 }
[ "java.io.IOException", "java.nio.ByteBuffer", "uk.me.parabola.imgfmt.MapFailedException" ]
import java.io.IOException; import java.nio.ByteBuffer; import uk.me.parabola.imgfmt.MapFailedException;
import java.io.*; import java.nio.*; import uk.me.parabola.imgfmt.*;
[ "java.io", "java.nio", "uk.me.parabola" ]
java.io; java.nio; uk.me.parabola;
39,509
public Collection getLoggerNames();
Collection function();
/** * Returns an unmodifiable Collection of the uniquely * known LoggerNames within this model. * * @return unmodifiable Collection of Logger name Strings */
Returns an unmodifiable Collection of the uniquely known LoggerNames within this model
getLoggerNames
{ "repo_name": "apache/chainsaw", "path": "src/main/java/org/apache/log4j/chainsaw/LoggerNameModel.java", "license": "apache-2.0", "size": 1840 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
84,497
public long getTime() { return (Sys.getTime() * 1000) / Sys.getTimerResolution(); }
long function() { return (Sys.getTime() * 1000) / Sys.getTimerResolution(); }
/** * Get the accurate system time * * @return The system time in milliseconds */
Get the accurate system time
getTime
{ "repo_name": "CyboticCatfish/code404", "path": "CodingGame/lib/slick/src/org/newdawn/slick/GameContainer.java", "license": "gpl-2.0", "size": 22558 }
[ "org.lwjgl.Sys" ]
import org.lwjgl.Sys;
import org.lwjgl.*;
[ "org.lwjgl" ]
org.lwjgl;
787,711
protected MongoClient createMongoClient() throws UnknownHostException { operationalInfo = new LinkedHashMap<>(); String dbName = config.get("db", "keycloak"); String uriString = config.get("uri"); if (uriString != null) { MongoClientURI uri = new MongoClientURI(uriString); MongoClient client = new MongoClient(uri); StringBuilder hostsBuilder = new StringBuilder(); for (int i=0 ; i<uri.getHosts().size() ; i++) { if (i!=0) { hostsBuilder.append(", "); } hostsBuilder.append(uri.getHosts().get(i)); } String hosts = hostsBuilder.toString(); operationalInfo.put("mongoHosts", hosts); operationalInfo.put("mongoDatabaseName", dbName); operationalInfo.put("mongoUser", uri.getUsername()); operationalInfo.put("mongoDriverVersion", client.getVersion()); logger.debugv("Initialized mongo model. host(s): %s, db: %s", uri.getHosts(), dbName); return client; } else { String host = config.get("host", ServerAddress.defaultHost()); int port = config.getInt("port", ServerAddress.defaultPort()); String user = config.get("user"); String password = config.get("password"); MongoClientOptions clientOptions = getClientOptions(); MongoClient client; if (user != null && password != null) { MongoCredential credential = MongoCredential.createMongoCRCredential(user, dbName, password.toCharArray()); client = new MongoClient(new ServerAddress(host, port), Collections.singletonList(credential), clientOptions); } else { client = new MongoClient(new ServerAddress(host, port), clientOptions); } operationalInfo.put("mongoServerAddress", client.getAddress().toString()); operationalInfo.put("mongoDatabaseName", dbName); operationalInfo.put("mongoUser", user); operationalInfo.put("mongoDriverVersion", client.getVersion()); logger.debugv("Initialized mongo model. host: %s, port: %d, db: %s", host, port, dbName); return client; } }
MongoClient function() throws UnknownHostException { operationalInfo = new LinkedHashMap<>(); String dbName = config.get("db", STR); String uriString = config.get("uri"); if (uriString != null) { MongoClientURI uri = new MongoClientURI(uriString); MongoClient client = new MongoClient(uri); StringBuilder hostsBuilder = new StringBuilder(); for (int i=0 ; i<uri.getHosts().size() ; i++) { if (i!=0) { hostsBuilder.append(STR); } hostsBuilder.append(uri.getHosts().get(i)); } String hosts = hostsBuilder.toString(); operationalInfo.put(STR, hosts); operationalInfo.put(STR, dbName); operationalInfo.put(STR, uri.getUsername()); operationalInfo.put(STR, client.getVersion()); logger.debugv(STR, uri.getHosts(), dbName); return client; } else { String host = config.get("host", ServerAddress.defaultHost()); int port = config.getInt("port", ServerAddress.defaultPort()); String user = config.get("user"); String password = config.get(STR); MongoClientOptions clientOptions = getClientOptions(); MongoClient client; if (user != null && password != null) { MongoCredential credential = MongoCredential.createMongoCRCredential(user, dbName, password.toCharArray()); client = new MongoClient(new ServerAddress(host, port), Collections.singletonList(credential), clientOptions); } else { client = new MongoClient(new ServerAddress(host, port), clientOptions); } operationalInfo.put(STR, client.getAddress().toString()); operationalInfo.put(STR, dbName); operationalInfo.put(STR, user); operationalInfo.put(STR, client.getVersion()); logger.debugv(STR, host, port, dbName); return client; } }
/** * Override this method if you want more possibility to configure Mongo client. It can be also used to inject mongo client * from different source. * * This method can assume that "config" is already set and can use it. * * @return mongoClient instance, which will be shared for whole Keycloak * * @throws UnknownHostException */
Override this method if you want more possibility to configure Mongo client. It can be also used to inject mongo client from different source. This method can assume that "config" is already set and can use it
createMongoClient
{ "repo_name": "dylanplecki/keycloak", "path": "connections/mongo/src/main/java/org/keycloak/connections/mongo/DefaultMongoConnectionFactoryProvider.java", "license": "apache-2.0", "size": 10547 }
[ "com.mongodb.MongoClient", "com.mongodb.MongoClientOptions", "com.mongodb.MongoClientURI", "com.mongodb.MongoCredential", "com.mongodb.ServerAddress", "java.net.UnknownHostException", "java.util.Collections", "java.util.LinkedHashMap" ]
import com.mongodb.MongoClient; import com.mongodb.MongoClientOptions; import com.mongodb.MongoClientURI; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; import java.net.UnknownHostException; import java.util.Collections; import java.util.LinkedHashMap;
import com.mongodb.*; import java.net.*; import java.util.*;
[ "com.mongodb", "java.net", "java.util" ]
com.mongodb; java.net; java.util;
1,470,864
// TODO This is badly in need of cleanup. public void add(AVMNode node, String name, boolean directlyContained, boolean write) { LookupComponent comp = new LookupComponent(); comp.setName(name); comp.setNode(node); if (fPosition >= 0 && fDirectlyContained && getCurrentNode().getType() == AVMNodeType.LAYERED_DIRECTORY) { // if (directlyContained != ((DirectoryNode)fComponents.get(fPosition).getNode()).directlyContains(node)) // { // System.err.println("Bloody Murder!"); // } fDirectlyContained = directlyContained; if (logger.isTraceEnabled()) { logger.trace("add: fDirectlyContained = "+directlyContained); } } if (!write) { if (node.getType() == AVMNodeType.LAYERED_DIRECTORY) { LayeredDirectoryNode oNode = (LayeredDirectoryNode)node; if (oNode.getPrimaryIndirection()) { comp.setIndirection(oNode.getIndirection()); comp.setIndirectionVersion(oNode.getIndirectionVersion()); } else { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(ind.getSecond()); } fLayeredYet = true; // Record the first layer seen. if (fTopLayer == null) { fTopLayer = oNode; fTopLayerIndex = fPosition + 1; } fLowestLayerIndex = fPosition + 1; } fComponents.add(comp); fPosition++; return; } if (!node.getIsNew()) { fNeedsCopying = true; if (logger.isTraceEnabled()) { logger.trace("add-cow: "+this+" ("+fPosition+") (not new)"); } } else { if (fPosition >= 0 && !fDirectlyContained) { fNeedsCopying = true; if (logger.isTraceEnabled()) { logger.trace("add: COW: "+this+" ("+fPosition+") (new, not directly contained)"); } } } // Record various things if this is layered. if (node.getType() == AVMNodeType.LAYERED_DIRECTORY) { LayeredDirectoryNode oNode = (LayeredDirectoryNode)node; // Record the indirection path that should be used. if (oNode.getPrimaryIndirection()) { comp.setIndirection(oNode.getIndirection()); comp.setIndirectionVersion(-1); } else { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(-1); } fLayeredYet = true; // Record the first layer seen. if (fTopLayer == null) { fTopLayer = oNode; fTopLayerIndex = fPosition + 1; } fLowestLayerIndex = fPosition + 1; } // In a write context a plain directory contained in a layer will // be copied so we will need to compute an indirection path. else if (fLayeredYet) { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(-1); } fComponents.add(comp); fPosition++; // If we are in a write context do copy on write. if (fNeedsCopying) { node = node.copy(this); getCurrentLookupComponent().setNode(node); if (fPosition == 0) { // Inform the store of a new root. fAVMStore.setNewRoot((DirectoryNode)node); ((DirectoryNode)node).setIsRoot(true); AVMDAOs.Instance().fAVMNodeDAO.update(((DirectoryNode)node)); AVMDAOs.Instance().fAVMStoreDAO.update(fAVMStore); return; } // Not the root. Check if we are the top layer and insert this into it's parent. if (fPosition == fTopLayerIndex) { fTopLayer = (LayeredDirectoryNode)node; } ((DirectoryNode)fComponents.get(fPosition - 1).getNode()).putChild(name, node); } }
void function(AVMNode node, String name, boolean directlyContained, boolean write) { LookupComponent comp = new LookupComponent(); comp.setName(name); comp.setNode(node); if (fPosition >= 0 && fDirectlyContained && getCurrentNode().getType() == AVMNodeType.LAYERED_DIRECTORY) { fDirectlyContained = directlyContained; if (logger.isTraceEnabled()) { logger.trace(STR+directlyContained); } } if (!write) { if (node.getType() == AVMNodeType.LAYERED_DIRECTORY) { LayeredDirectoryNode oNode = (LayeredDirectoryNode)node; if (oNode.getPrimaryIndirection()) { comp.setIndirection(oNode.getIndirection()); comp.setIndirectionVersion(oNode.getIndirectionVersion()); } else { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(ind.getSecond()); } fLayeredYet = true; if (fTopLayer == null) { fTopLayer = oNode; fTopLayerIndex = fPosition + 1; } fLowestLayerIndex = fPosition + 1; } fComponents.add(comp); fPosition++; return; } if (!node.getIsNew()) { fNeedsCopying = true; if (logger.isTraceEnabled()) { logger.trace(STR+this+STR+fPosition+STR); } } else { if (fPosition >= 0 && !fDirectlyContained) { fNeedsCopying = true; if (logger.isTraceEnabled()) { logger.trace(STR+this+STR+fPosition+STR); } } } if (node.getType() == AVMNodeType.LAYERED_DIRECTORY) { LayeredDirectoryNode oNode = (LayeredDirectoryNode)node; if (oNode.getPrimaryIndirection()) { comp.setIndirection(oNode.getIndirection()); comp.setIndirectionVersion(-1); } else { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(-1); } fLayeredYet = true; if (fTopLayer == null) { fTopLayer = oNode; fTopLayerIndex = fPosition + 1; } fLowestLayerIndex = fPosition + 1; } else if (fLayeredYet) { Pair<String, Integer> ind = computeIndirection(name); comp.setIndirection(ind.getFirst()); comp.setIndirectionVersion(-1); } fComponents.add(comp); fPosition++; if (fNeedsCopying) { node = node.copy(this); getCurrentLookupComponent().setNode(node); if (fPosition == 0) { fAVMStore.setNewRoot((DirectoryNode)node); ((DirectoryNode)node).setIsRoot(true); AVMDAOs.Instance().fAVMNodeDAO.update(((DirectoryNode)node)); AVMDAOs.Instance().fAVMStoreDAO.update(fAVMStore); return; } if (fPosition == fTopLayerIndex) { fTopLayer = (LayeredDirectoryNode)node; } ((DirectoryNode)fComponents.get(fPosition - 1).getNode()).putChild(name, node); } }
/** * Add a new node to the lookup. * @param node The node to add. * @param name The name of the node in the path. * @param write Whether this is in the context of * a write operation. */
Add a new node to the lookup
add
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/repository/source/java/org/alfresco/repo/avm/Lookup.java", "license": "lgpl-3.0", "size": 18080 }
[ "org.alfresco.util.Pair" ]
import org.alfresco.util.Pair;
import org.alfresco.util.*;
[ "org.alfresco.util" ]
org.alfresco.util;
1,369,749
public void testImportAutoCheckoutNodes() throws IOException, JsonException { final String testPath = TEST_BASE_PATH; Map<String, String> props = new HashMap<String, String>(); String testNode = testClient.createNode(HTTP_BASE_URL + testPath, props); urlsToDelete.add(testNode); //1. first create some content to update. props.clear(); props.put(SlingPostConstants.RP_OPERATION, SlingPostConstants.OPERATION_IMPORT); String testNodeName = "testNode_" + String.valueOf(random.nextInt()); props.put(SlingPostConstants.RP_NODE_NAME_HINT, testNodeName); testFile = getTestFile(getClass().getResourceAsStream("/integration-test/servlets/post/testimport3.json")); props.put(SlingPostConstants.RP_CONTENT_TYPE, "json"); props.put(SlingPostConstants.RP_REDIRECT_TO, SERVLET_CONTEXT + testPath + "*;q=0.9")); HttpMethod post = assertPostStatus(importedNodeUrl, HttpServletResponse.SC_CREATED, postParams, "Expected 201 status"); String responseBodyAsString = post.getResponseBodyAsString(); JsonObject responseJSON = JsonUtil.parseObject(responseBodyAsString); JsonArray changes = responseJSON.getJsonArray("changes"); JsonObject checkoutChange = changes.getJsonObject(0); assertEquals("checkout", checkoutChange.getString("type")); // assert content at new location String content2 = getContent(importedNodeUrl + ".json", CONTENT_TYPE_JSON); JsonObject jsonObj2 = JsonUtil.parseObject(content2); assertNotNull(jsonObj2); //make sure it was really updated assertEquals("def2", jsonObj2.getString("abc")); //assert that the versionable node is checked back in. assertFalse(jsonObj.getBoolean("jcr:isCheckedOut")); }
void function() throws IOException, JsonException { final String testPath = TEST_BASE_PATH; Map<String, String> props = new HashMap<String, String>(); String testNode = testClient.createNode(HTTP_BASE_URL + testPath, props); urlsToDelete.add(testNode); props.clear(); props.put(SlingPostConstants.RP_OPERATION, SlingPostConstants.OPERATION_IMPORT); String testNodeName = STR + String.valueOf(random.nextInt()); props.put(SlingPostConstants.RP_NODE_NAME_HINT, testNodeName); testFile = getTestFile(getClass().getResourceAsStream(STR)); props.put(SlingPostConstants.RP_CONTENT_TYPE, "json"); props.put(SlingPostConstants.RP_REDIRECT_TO, SERVLET_CONTEXT + testPath + STR)); HttpMethod post = assertPostStatus(importedNodeUrl, HttpServletResponse.SC_CREATED, postParams, STR); String responseBodyAsString = post.getResponseBodyAsString(); JsonObject responseJSON = JsonUtil.parseObject(responseBodyAsString); JsonArray changes = responseJSON.getJsonArray(STR); JsonObject checkoutChange = changes.getJsonObject(0); assertEquals(STR, checkoutChange.getString("type")); String content2 = getContent(importedNodeUrl + ".json", CONTENT_TYPE_JSON); JsonObject jsonObj2 = JsonUtil.parseObject(content2); assertNotNull(jsonObj2); assertEquals("def2", jsonObj2.getString("abc")); assertFalse(jsonObj.getBoolean(STR)); }
/** * SLING-2108 Test import operation which auto checks out versionable nodes. */
SLING-2108 Test import operation which auto checks out versionable nodes
testImportAutoCheckoutNodes
{ "repo_name": "tmaret/sling", "path": "launchpad/integration-tests/src/main/java/org/apache/sling/launchpad/webapp/integrationtest/servlets/post/PostServletImportTest.java", "license": "apache-2.0", "size": 38777 }
[ "java.io.IOException", "java.util.HashMap", "java.util.Map", "javax.json.JsonArray", "javax.json.JsonException", "javax.json.JsonObject", "javax.servlet.http.HttpServletResponse", "org.apache.commons.httpclient.HttpMethod", "org.apache.sling.launchpad.webapp.integrationtest.util.JsonUtil", "org.apache.sling.servlets.post.SlingPostConstants" ]
import java.io.IOException; import java.util.HashMap; import java.util.Map; import javax.json.JsonArray; import javax.json.JsonException; import javax.json.JsonObject; import javax.servlet.http.HttpServletResponse; import org.apache.commons.httpclient.HttpMethod; import org.apache.sling.launchpad.webapp.integrationtest.util.JsonUtil; import org.apache.sling.servlets.post.SlingPostConstants;
import java.io.*; import java.util.*; import javax.json.*; import javax.servlet.http.*; import org.apache.commons.httpclient.*; import org.apache.sling.launchpad.webapp.integrationtest.util.*; import org.apache.sling.servlets.post.*;
[ "java.io", "java.util", "javax.json", "javax.servlet", "org.apache.commons", "org.apache.sling" ]
java.io; java.util; javax.json; javax.servlet; org.apache.commons; org.apache.sling;
482,231
public static Transaction fromProposal(Proposal prop) { Zxid zxid = fromProtoZxid(prop.getZxid()); ByteBuffer buffer = prop.getBody().asReadOnlyByteBuffer(); return new Transaction(zxid, prop.getType().getNumber(), buffer); }
static Transaction function(Proposal prop) { Zxid zxid = fromProtoZxid(prop.getZxid()); ByteBuffer buffer = prop.getBody().asReadOnlyByteBuffer(); return new Transaction(zxid, prop.getType().getNumber(), buffer); }
/** * Converts protobuf Proposal object to Transaction object. * * @param prop the protobuf Proposal object. * @return the Transaction object. */
Converts protobuf Proposal object to Transaction object
fromProposal
{ "repo_name": "fpj/jzab", "path": "src/main/java/com/github/zk1931/jzab/MessageBuilder.java", "license": "apache-2.0", "size": 23166 }
[ "com.github.zk1931.jzab.proto.ZabMessage", "java.nio.ByteBuffer" ]
import com.github.zk1931.jzab.proto.ZabMessage; import java.nio.ByteBuffer;
import com.github.zk1931.jzab.proto.*; import java.nio.*;
[ "com.github.zk1931", "java.nio" ]
com.github.zk1931; java.nio;
1,950,850
public QueryResult getAuthorsUnpublishedQuestions(final Long inAuthorID, final Long inGroupID) throws VException, SQLException { return getAuthorsQuestions(inAuthorID, inGroupID, WorkflowAwareContribution.STATES_UNPUBLISHED); }
QueryResult function(final Long inAuthorID, final Long inGroupID) throws VException, SQLException { return getAuthorsQuestions(inAuthorID, inGroupID, WorkflowAwareContribution.STATES_UNPUBLISHED); }
/** Returns all unpublished questions in the specified group the specified member has authored. * * @param inAuthorID Long * @param inGroupID Long * @return QueryResult * @throws VException * @throws SQLException */
Returns all unpublished questions in the specified group the specified member has authored
getAuthorsUnpublishedQuestions
{ "repo_name": "aktion-hip/vif", "path": "org.hip.vif.core/src/org/hip/vif/core/bom/impl/JoinAuthorReviewerToQuestionHome.java", "license": "gpl-2.0", "size": 15943 }
[ "java.sql.SQLException", "org.hip.kernel.bom.QueryResult", "org.hip.kernel.exc.VException" ]
import java.sql.SQLException; import org.hip.kernel.bom.QueryResult; import org.hip.kernel.exc.VException;
import java.sql.*; import org.hip.kernel.bom.*; import org.hip.kernel.exc.*;
[ "java.sql", "org.hip.kernel" ]
java.sql; org.hip.kernel;
1,389,922
public void untar(final FilePath target, final TarCompression compression) throws IOException, InterruptedException { // TODO: post release, re-unite two branches by introducing FileStreamCallable that resolves InputStream if (this.channel!=target.channel) {// local -> remote or remote->local final RemoteInputStream in = new RemoteInputStream(read(), Flag.GREEDY); target.act(new UntarRemote(compression, in)); } else {// local -> local or remote->remote target.act(new UntarLocal(compression)); } } private class UntarRemote extends SecureFileCallable<Void> { private final TarCompression compression; private final RemoteInputStream in; UntarRemote(TarCompression compression, RemoteInputStream in) { this.compression = compression; this.in = in; }
void function(final FilePath target, final TarCompression compression) throws IOException, InterruptedException { if (this.channel!=target.channel) { final RemoteInputStream in = new RemoteInputStream(read(), Flag.GREEDY); target.act(new UntarRemote(compression, in)); } else { target.act(new UntarLocal(compression)); } } private class UntarRemote extends SecureFileCallable<Void> { private final TarCompression compression; private final RemoteInputStream in; UntarRemote(TarCompression compression, RemoteInputStream in) { this.compression = compression; this.in = in; }
/** * When this {@link FilePath} represents a tar file, extracts that tar file. * * @param target * Target directory to expand files to. All the necessary directories will be created. * @param compression * Compression mode of this tar file. * @since 1.292 * @see #untarFrom(InputStream, TarCompression) */
When this <code>FilePath</code> represents a tar file, extracts that tar file
untar
{ "repo_name": "pjanouse/jenkins", "path": "core/src/main/java/hudson/FilePath.java", "license": "mit", "size": 148331 }
[ "hudson.remoting.RemoteInputStream", "java.io.IOException" ]
import hudson.remoting.RemoteInputStream; import java.io.IOException;
import hudson.remoting.*; import java.io.*;
[ "hudson.remoting", "java.io" ]
hudson.remoting; java.io;
1,868,067
public static <E> ListIterator<E> compose(ListIterator<E> listIterator, Callback callback) { return new CallbackListIterator<E>(listIterator, callback); }
static <E> ListIterator<E> function(ListIterator<E> listIterator, Callback callback) { return new CallbackListIterator<E>(listIterator, callback); }
/** * Returns the composition of the given iterator and callback. * <p> * Note: Any changes to the original iterator will cause the returned * iterator to change and vice versa. But direct changes on the source * will not be propagated with the callback. * </p> * * @param <E> the generic element type * @param listIterator the underlying iterator * @param callback the custom callback * @return a iterator which propagates strucutural changes using the specified callback */
Returns the composition of the given iterator and callback. Note: Any changes to the original iterator will cause the returned iterator to change and vice versa. But direct changes on the source will not be propagated with the callback.
compose
{ "repo_name": "cosmocode/cosmocode-commons", "path": "src/main/java/de/cosmocode/collections/callback/Callbacks.java", "license": "apache-2.0", "size": 5183 }
[ "java.util.ListIterator" ]
import java.util.ListIterator;
import java.util.*;
[ "java.util" ]
java.util;
1,860,294
@Override public List shortestPath(Graph graph, List locations, Weighing weigh) { MattsDijkstra dijkstra = new MattsDijkstra(); dijkstra.setGraph(graph); dijkstra.setWeighing(weigh); ArrayList<Integer> path = new ArrayList<Integer>(); // set the start dijkstra.setStart((int) locations.get(0)); ArrayList<Integer> intPath = new ArrayList<Integer>(); // find the shortest path for (int id : (ArrayList<Integer>)locations){ // find the shortest path from one location to the next dijkstra.computeShortestPath(); // shortest path from start to end intPath = (ArrayList<Integer>) dijkstra.getPath(id); // set start id for next iteration dijkstra.setStart(id); // add intermediate path to total path for( int i : intPath){ path.add(i); } } return path; }
List function(Graph graph, List locations, Weighing weigh) { MattsDijkstra dijkstra = new MattsDijkstra(); dijkstra.setGraph(graph); dijkstra.setWeighing(weigh); ArrayList<Integer> path = new ArrayList<Integer>(); dijkstra.setStart((int) locations.get(0)); ArrayList<Integer> intPath = new ArrayList<Integer>(); for (int id : (ArrayList<Integer>)locations){ dijkstra.computeShortestPath(); intPath = (ArrayList<Integer>) dijkstra.getPath(id); dijkstra.setStart(id); for( int i : intPath){ path.add(i); } } return path; }
/** * Computes the shortest path that visits all the locations in * the graph in the order specified. * @param graph An arbitrary directed graph * @param locations An ordered list of vertex ID locations to be visited. * @param weigh A object to determine the weight of each edge in the graph * @return An ordered list of vertex IDs representing the shortest path * that visits all the locations in the given order */
Computes the shortest path that visits all the locations in the graph in the order specified
shortestPath
{ "repo_name": "mattmckillip/Coms311", "path": "CoffeSolver/implementation/MattsCoffeeSolver.java", "license": "mit", "size": 4070 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,733,769
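A self-contained sketch of the leg-by-leg pattern used by the shortestPath record above: run one shortest-path computation per consecutive pair of locations and concatenate the legs in visiting order. The adjacency map, class name, and sample graph below are illustrative stand-ins, not the Graph/Weighing/MattsDijkstra API referenced by the record.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;

public class WaypointPathDemo {
    // Weighted adjacency list: source -> (target -> edge weight).
    static final Map<Integer, Map<Integer, Integer>> EDGES = new HashMap<>();

    static void edge(int a, int b, int w) {
        EDGES.computeIfAbsent(a, k -> new HashMap<>()).put(b, w);
    }

    // Plain Dijkstra from 'start'; returns a predecessor map for path recovery.
    static Map<Integer, Integer> dijkstra(int start) {
        Map<Integer, Integer> dist = new HashMap<>();
        Map<Integer, Integer> prev = new HashMap<>();
        PriorityQueue<int[]> pq = new PriorityQueue<>((x, y) -> x[1] - y[1]);
        dist.put(start, 0);
        pq.add(new int[] {start, 0});
        while (!pq.isEmpty()) {
            int[] cur = pq.poll();
            if (cur[1] > dist.getOrDefault(cur[0], Integer.MAX_VALUE)) continue;
            for (Map.Entry<Integer, Integer> e
                    : EDGES.getOrDefault(cur[0], Map.of()).entrySet()) {
                int nd = cur[1] + e.getValue();
                if (nd < dist.getOrDefault(e.getKey(), Integer.MAX_VALUE)) {
                    dist.put(e.getKey(), nd);
                    prev.put(e.getKey(), cur[0]);
                    pq.add(new int[] {e.getKey(), nd});
                }
            }
        }
        return prev;
    }

    // Recover the vertices from start to end (start itself excluded).
    static List<Integer> path(Map<Integer, Integer> prev, int start, int end) {
        List<Integer> leg = new ArrayList<>();
        for (Integer v = end; v != null && v != start; v = prev.get(v)) {
            leg.add(0, v);
        }
        return leg;
    }

    public static void main(String[] args) {
        edge(0, 1, 1); edge(1, 2, 1); edge(2, 3, 1); edge(0, 2, 5);
        int[] waypoints = {0, 2, 3};
        List<Integer> full = new ArrayList<>();
        int start = waypoints[0];
        // Same pattern as the record: one shortest-path run per leg,
        // appending each leg to the total path.
        for (int i = 1; i < waypoints.length; i++) {
            full.addAll(path(dijkstra(start), start, waypoints[i]));
            start = waypoints[i];
        }
        System.out.println(full); // [1, 2, 3]
    }
}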
@Test(expected = IndexOutOfBoundsException.class) public void testOutOfTupleBoundsGrouping2() { final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0); // should not work, key out of tuple bounds groupDs.minBy(-1); }
@Test(expected = IndexOutOfBoundsException.class) void function() { final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); UnsortedGrouping<Tuple5<Integer, Long, String, Long, Integer>> groupDs = env.fromCollection(emptyTupleData, tupleTypeInfo).groupBy(0); groupDs.minBy(-1); }
/** * This test validates that an index which is out of bounds throws an IndexOutOfBoundsException. */
This test validates that an index which is out of bounds throws an IndexOutOfBoundsException
testOutOfTupleBoundsGrouping2
{ "repo_name": "lincoln-lil/flink", "path": "flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java", "license": "apache-2.0", "size": 9825 }
[ "org.apache.flink.api.java.ExecutionEnvironment", "org.apache.flink.api.java.operators.UnsortedGrouping", "org.apache.flink.api.java.tuple.Tuple5", "org.junit.Test" ]
import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.operators.UnsortedGrouping; import org.apache.flink.api.java.tuple.Tuple5; import org.junit.Test;
import org.apache.flink.api.java.*; import org.apache.flink.api.java.operators.*; import org.apache.flink.api.java.tuple.*; import org.junit.*;
[ "org.apache.flink", "org.junit" ]
org.apache.flink; org.junit;
2,265,485
@Override public void write(byte[] b) throws IOException { underlying.write(b); }
void function(byte[] b) throws IOException { underlying.write(b); }
/** * Method to write a byte array to the underlying repository file. * * @param b byte[] to write to the file * * @return void * * @throws IOException * * @see OutputStream * @see RandomAccessFile */
Method to write a byte array to the underlying repository file
write
{ "repo_name": "gujianxiao/gatewayForMulticom", "path": "javasrc/src/main/org/ndnx/ndn/impl/repo/RandomAccessOutputStream.java", "license": "lgpl-2.1", "size": 2717 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
112,245
@Test public void mergeEmptyHadoopConfiguration() { org.apache.hadoop.conf.Configuration hadoopConfig = new org.apache.hadoop.conf.Configuration(); long beforeSize = Configuration.toMap().size(); HadoopConfigurationUtils.mergeHadoopConfiguration(hadoopConfig, Configuration.global()); long afterSize = Configuration.toMap().size(); Assert.assertEquals(beforeSize, afterSize); Assert.assertFalse(Configuration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)); }
void function() { org.apache.hadoop.conf.Configuration hadoopConfig = new org.apache.hadoop.conf.Configuration(); long beforeSize = Configuration.toMap().size(); HadoopConfigurationUtils.mergeHadoopConfiguration(hadoopConfig, Configuration.global()); long afterSize = Configuration.toMap().size(); Assert.assertEquals(beforeSize, afterSize); Assert.assertFalse(Configuration.getBoolean(PropertyKey.ZOOKEEPER_ENABLED)); }
/** * Test for the {@link HadoopConfigurationUtils#mergeHadoopConfiguration} method for an empty * configuration. */
Test for the <code>HadoopConfigurationUtils#mergeHadoopConfiguration</code> method for an empty configuration
mergeEmptyHadoopConfiguration
{ "repo_name": "PasaLab/tachyon", "path": "core/client/hdfs/src/test/java/alluxio/hadoop/HadoopConfigurationUtilsTest.java", "license": "apache-2.0", "size": 3341 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,582,232
public Metadata.Builder removeStandardMethodUnderrides(Metadata.StandardMethod key) { Preconditions.checkNotNull(key); Preconditions.checkArgument(standardMethodUnderrides.containsKey(key), "Key not present in standardMethodUnderrides: %s", key); standardMethodUnderrides.remove(key); return (Metadata.Builder) this; }
Metadata.Builder function(Metadata.StandardMethod key) { Preconditions.checkNotNull(key); Preconditions.checkArgument(standardMethodUnderrides.containsKey(key), STR, key); standardMethodUnderrides.remove(key); return (Metadata.Builder) this; }
/** * Removes the mapping for {@code key} from the map to be returned from * {@link Metadata#getStandardMethodUnderrides()}. * * @return this {@code Builder} object * @throws NullPointerException if {@code key} is null * @throws IllegalArgumentException if {@code key} is not present */
Removes the mapping for key from the map to be returned from <code>Metadata#getStandardMethodUnderrides()</code>
removeStandardMethodUnderrides
{ "repo_name": "sposam/FreeBuilder", "path": "src/main/java/org/inferred/freebuilder/processor/Metadata_Builder.java", "license": "apache-2.0", "size": 38537 }
[ "com.google.common.base.Preconditions", "org.inferred.freebuilder.processor.Metadata" ]
import com.google.common.base.Preconditions; import org.inferred.freebuilder.processor.Metadata;
import com.google.common.base.*; import org.inferred.freebuilder.processor.*;
[ "com.google.common", "org.inferred.freebuilder" ]
com.google.common; org.inferred.freebuilder;
1,511,816
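The removal method above leans on Guava's Preconditions for its null and membership guards before mutating the map. A minimal standalone illustration of that fail-fast pattern follows; it assumes Guava is on the classpath, and the map contents and message text are made up for the example.

import com.google.common.base.Preconditions;
import java.util.HashMap;
import java.util.Map;

public class RemoveGuardDemo {
    public static void main(String[] args) {
        Map<String, String> underrides = new HashMap<>();
        underrides.put("toString", "custom");
        String key = "toString";
        // Throw NullPointerException / IllegalArgumentException before
        // touching the map, mirroring the builder method above.
        Preconditions.checkNotNull(key);
        Preconditions.checkArgument(underrides.containsKey(key),
            "Key not present: %s", key);
        underrides.remove(key);
        System.out.println(underrides); // {}
    }
}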
public void finish() { if (mAccountAuthenticatorResponse != null) { // send the result bundle back if set, otherwise send an error. if (mResultBundle != null) { mAccountAuthenticatorResponse.onResult(mResultBundle); } else { mAccountAuthenticatorResponse.onError(AccountManager.ERROR_CODE_CANCELED, "canceled"); } mAccountAuthenticatorResponse = null; } super.finish(); }
void function() { if (mAccountAuthenticatorResponse != null) { if (mResultBundle != null) { mAccountAuthenticatorResponse.onResult(mResultBundle); } else { mAccountAuthenticatorResponse.onError(AccountManager.ERROR_CODE_CANCELED, STR); } mAccountAuthenticatorResponse = null; } super.finish(); }
/** * Sends the result or a Constants.ERROR_CODE_CANCELED error if a result isn't present. */
Sends the result or a Constants.ERROR_CODE_CANCELED error if a result isn't present
finish
{ "repo_name": "njucsyyh/android", "path": "app/src/main/java/com/github/mobile/ui/roboactivities/ActionBarAccountAuthenticatorActivity.java", "license": "apache-2.0", "size": 3181 }
[ "android.accounts.AccountManager" ]
import android.accounts.AccountManager;
import android.accounts.*;
[ "android.accounts" ]
android.accounts;
1,237,764
public String getSearchResultAbstract(); public String getSearchResultURI(); public Map<String, Object> getSearchResultAttributes();
String getSearchResultAbstract(); public String getSearchResultURI(); public Map<String, Object> function();
/** * Can return any arbitrary non-standard attributes with a Collection of attributeKey=attributeValue. * @return */
Can return any arbitrary non-standard attributes with a Collection of attributeKey=attributeValue
getSearchResultAttributes
{ "repo_name": "idega/com.idega.core", "path": "src/java/com/idega/core/search/business/SearchResult.java", "license": "gpl-3.0", "size": 1394 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
825,536
@ServiceMethod(returns = ReturnType.SINGLE) PollerFlux< PollResult<ExpressRouteCircuitsRoutesTableListResultInner>, ExpressRouteCircuitsRoutesTableListResultInner> beginListRoutesTableAsync(String resourceGroupName, String circuitName, String peeringName, String devicePath);
@ServiceMethod(returns = ReturnType.SINGLE) PollerFlux< PollResult<ExpressRouteCircuitsRoutesTableListResultInner>, ExpressRouteCircuitsRoutesTableListResultInner> beginListRoutesTableAsync(String resourceGroupName, String circuitName, String peeringName, String devicePath);
/** * Gets the currently advertised routes table associated with the express route circuit in a resource group. * * @param resourceGroupName The name of the resource group. * @param circuitName The name of the express route circuit. * @param peeringName The name of the peering. * @param devicePath The path of the device. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the currently advertised routes table associated with the express route circuit in a resource group. */
Gets the currently advertised routes table associated with the express route circuit in a resource group
beginListRoutesTableAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/ExpressRouteCircuitsClient.java", "license": "mit", "size": 51909 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.management.polling.PollResult", "com.azure.core.util.polling.PollerFlux", "com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitsRoutesTableListResultInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.polling.PollerFlux; import com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitsRoutesTableListResultInner;
import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.polling.*; import com.azure.resourcemanager.network.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
2,799,875
//------------------------------------------------------------------------- // Main interface for backend provider of the WebView class. //------------------------------------------------------------------------- public void init(Map<String, Object> javaScriptInterfaces, boolean privateBrowsing);
void function(Map<String, Object> javaScriptInterfaces, boolean privateBrowsing);
/** * Initialize this WebViewProvider instance. Called after the WebView has fully constructed. * @param javaScriptInterfaces is a Map of interface names, as keys, and * object implementing those interfaces, as values. * @param privateBrowsing If true the web view will be initialized in private / incognito mode. */
Initialize this WebViewProvider instance. Called after the WebView has fully constructed
init
{ "repo_name": "haikuowuya/android_system_code", "path": "src/android/webkit/WebViewProvider.java", "license": "apache-2.0", "size": 12212 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
505,005
public static final CommandService createLocalCommandService(Cache cache) throws CommandServiceException { if (cache == null || cache.isClosed()) { throw new CacheClosedException("Can not create command service as cache doesn't exist or cache is closed."); } if (localCommandService == null || !localCommandService.isUsable()) { String nonExistingDependency = CliUtil.cliDependenciesExist(false); if (nonExistingDependency != null) { throw new DependenciesNotFoundException(LocalizedStrings.CommandServiceManager_COULD_NOT_FIND__0__LIB_NEEDED_FOR_CLI_GFSH.toLocalizedString(new Object[] {nonExistingDependency})); } localCommandService = new MemberCommandService(cache); } return localCommandService; }
static final CommandService function(Cache cache) throws CommandServiceException { if (cache == null || cache.isClosed()) { throw new CacheClosedException(STR); } if (localCommandService == null || !localCommandService.isUsable()) { String nonExistingDependency = CliUtil.cliDependenciesExist(false); if (nonExistingDependency != null) { throw new DependenciesNotFoundException(LocalizedStrings.CommandServiceManager_COULD_NOT_FIND__0__LIB_NEEDED_FOR_CLI_GFSH.toLocalizedString(new Object[] {nonExistingDependency})); } localCommandService = new MemberCommandService(cache); } return localCommandService; }
/** * Returns a newly created or existing instance of the * <code>CommandService<code> associated with the * specified <code>Cache</code>. * * @param cache * Underlying <code>Cache</code> instance to be used to create a Command Service. * @throws CommandServiceException * If command service could not be initialized. */
Returns a newly created or existing instance of the <code>CommandService<code> associated with the specified <code>Cache</code>
createLocalCommandService
{ "repo_name": "ameybarve15/incubator-geode", "path": "gemfire-core/src/main/java/com/gemstone/gemfire/management/cli/CommandService.java", "license": "apache-2.0", "size": 7686 }
[ "com.gemstone.gemfire.cache.Cache", "com.gemstone.gemfire.cache.CacheClosedException", "com.gemstone.gemfire.internal.i18n.LocalizedStrings", "com.gemstone.gemfire.management.DependenciesNotFoundException", "com.gemstone.gemfire.management.internal.cli.CliUtil", "com.gemstone.gemfire.management.internal.cli.remote.MemberCommandService" ]
import com.gemstone.gemfire.cache.Cache; import com.gemstone.gemfire.cache.CacheClosedException; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.management.DependenciesNotFoundException; import com.gemstone.gemfire.management.internal.cli.CliUtil; import com.gemstone.gemfire.management.internal.cli.remote.MemberCommandService;
import com.gemstone.gemfire.cache.*; import com.gemstone.gemfire.internal.i18n.*; import com.gemstone.gemfire.management.*; import com.gemstone.gemfire.management.internal.cli.*; import com.gemstone.gemfire.management.internal.cli.remote.*;
[ "com.gemstone.gemfire" ]
com.gemstone.gemfire;
1,102,523
public NCMBRole getRole(String roleId) throws NCMBException { RequestParams reqParams = getRoleParams(roleId); NCMBResponse response = sendRequest(reqParams); getRoleCheckResponse(response); JSONObject result = response.responseData; return new NCMBRole(result); }
NCMBRole function(String roleId) throws NCMBException { RequestParams reqParams = getRoleParams(roleId); NCMBResponse response = sendRequest(reqParams); getRoleCheckResponse(response); JSONObject result = response.responseData; return new NCMBRole(result); }
/** * Get role information * @param roleId role id * @return role object * @throws NCMBException exception sdk internal or NIFTY Cloud mobile backend */
Get role information
getRole
{ "repo_name": "Rebirthble/ncmb_android", "path": "ncmb-core/src/main/java/com/nifty/cloud/mb/core/NCMBRoleService.java", "license": "apache-2.0", "size": 23391 }
[ "org.json.JSONObject" ]
import org.json.JSONObject;
import org.json.*;
[ "org.json" ]
org.json;
2,601,072
public Timestamp getUpdated(); public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";
Timestamp function(); public static final String COLUMNNAME_UpdatedBy = STR;
/** Get Updated. * Date this record was updated */
Get Updated. Date this record was updated
getUpdated
{ "repo_name": "neuroidss/adempiere", "path": "base/src/org/compiere/model/I_AD_PInstance_Para.java", "license": "gpl-2.0", "size": 6733 }
[ "java.sql.Timestamp" ]
import java.sql.Timestamp;
import java.sql.*;
[ "java.sql" ]
java.sql;
837,702
private boolean isEventType(final String token) { SortedMap<String, String[]> eventCategories = org.eclipse.titan.log.viewer.utils.Constants.EVENT_CATEGORIES; if (token.contains(UNDERSCORE)) { // new format String cat = token.split(UNDERSCORE)[0]; String subCat = token.split(UNDERSCORE)[1]; if (eventCategories.keySet().contains(cat)) { String[] subCategories = eventCategories.get(cat); for (String subCategory : subCategories) { if (subCategory.equals(subCat)) { return true; } } } } else { // possibly old format if (eventCategories.keySet().contains(token)) { return true; } } return false; } //isEventType
boolean function(final String token) { SortedMap<String, String[]> eventCategories = org.eclipse.titan.log.viewer.utils.Constants.EVENT_CATEGORIES; if (token.contains(UNDERSCORE)) { String cat = token.split(UNDERSCORE)[0]; String subCat = token.split(UNDERSCORE)[1]; if (eventCategories.keySet().contains(cat)) { String[] subCategories = eventCategories.get(cat); for (String subCategory : subCategories) { if (subCategory.equals(subCat)) { return true; } } } } else { if (eventCategories.keySet().contains(token)) { return true; } } return false; }
/** * Checks if the Token is one of the predefined Event types * @param token the token * @return boolean true is token is a predefined Event type, otherwise false */
Checks if the Token is one of the predefined Event types
isEventType
{ "repo_name": "eroslevi/titan.EclipsePlug-ins", "path": "org.eclipse.titan.log.viewer/src/org/eclipse/titan/log/viewer/parsers/RecordParser.java", "license": "epl-1.0", "size": 14738 }
[ "java.util.SortedMap" ]
import java.util.SortedMap;
import java.util.*;
[ "java.util" ]
java.util;
717,499
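A runnable sketch of the category/sub-category lookup the isEventType record above performs on underscore-separated tokens. The TreeMap contents, class name, and the two-way split limit are illustrative assumptions standing in for the EVENT_CATEGORIES constant, not the TITAN log viewer's actual table.

import java.util.Arrays;
import java.util.SortedMap;
import java.util.TreeMap;

public class EventTypeCheckDemo {
    // Hypothetical category table: category -> allowed sub-categories.
    static final SortedMap<String, String[]> CATEGORIES = new TreeMap<>();
    static {
        CATEGORIES.put("TIMEROP", new String[] {"START", "STOP"});
        CATEGORIES.put("VERDICTOP", new String[] {"SETVERDICT"});
    }

    // New format: CATEGORY_SUBCATEGORY; old format: bare category name.
    static boolean isEventType(String token) {
        if (token.contains("_")) {
            // Limit 2 keeps the sketch safe for tokens with trailing underscores.
            String[] parts = token.split("_", 2);
            String[] subs = CATEGORIES.get(parts[0]);
            return subs != null && Arrays.asList(subs).contains(parts[1]);
        }
        return CATEGORIES.containsKey(token);
    }

    public static void main(String[] args) {
        System.out.println(isEventType("TIMEROP_START")); // true
        System.out.println(isEventType("VERDICTOP"));     // true
        System.out.println(isEventType("FOO_BAR"));       // false
    }
}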
ServiceOperationSignature getSourceOperationSignature();
ServiceOperationSignature getSourceOperationSignature();
/** * Getter for the service operation name of the source service. * * @return the source operation name */
Getter for the service operation name of the source service
getSourceOperationSignature
{ "repo_name": "NABUCCO/org.nabucco.framework.base", "path": "org.nabucco.framework.base.facade.component/src/main/man/org/nabucco/framework/base/facade/component/application/connector/ServiceConnector.java", "license": "epl-1.0", "size": 1355 }
[ "org.nabucco.framework.base.facade.service.signature.ServiceOperationSignature" ]
import org.nabucco.framework.base.facade.service.signature.ServiceOperationSignature;
import org.nabucco.framework.base.facade.service.signature.*;
[ "org.nabucco.framework" ]
org.nabucco.framework;
2,871,410
public boolean isIdenticalAddresses(LinkProperties target) { Collection<InetAddress> targetAddresses = target.getAddresses(); Collection<InetAddress> sourceAddresses = getAddresses(); return (sourceAddresses.size() == targetAddresses.size()) ? sourceAddresses.containsAll(targetAddresses) : false; }
boolean function(LinkProperties target) { Collection<InetAddress> targetAddresses = target.getAddresses(); Collection<InetAddress> sourceAddresses = getAddresses(); return (sourceAddresses.size() == targetAddresses.size()) ? sourceAddresses.containsAll(targetAddresses) : false; }
/** * Compares this {@code LinkProperties} interface addresses against the target * * @param target LinkProperties to compare. * @return {@code true} if both are identical, {@code false} otherwise. * @hide */
Compares this LinkProperties interface addresses against the target
isIdenticalAddresses
{ "repo_name": "xorware/android_frameworks_base", "path": "core/java/android/net/LinkProperties.java", "license": "apache-2.0", "size": 42081 }
[ "java.net.InetAddress", "java.util.Collection" ]
import java.net.InetAddress; import java.util.Collection;
import java.net.*; import java.util.*;
[ "java.net", "java.util" ]
java.net; java.util;
1,413,390
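The isIdenticalAddresses record above compares two address collections with a size check plus containsAll, which ignores ordering but quietly assumes the elements are unique. A minimal generic sketch of that comparison, with illustrative class and method names:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class AddressCompareDemo {
    // Same-size plus containsAll treats the collections as equal sets;
    // duplicate elements can defeat the check.
    static <T> boolean sameElements(Collection<T> a, Collection<T> b) {
        return a.size() == b.size() && a.containsAll(b);
    }

    public static void main(String[] args) {
        List<String> source = Arrays.asList("10.0.0.1", "10.0.0.2");
        List<String> target = Arrays.asList("10.0.0.2", "10.0.0.1");
        System.out.println(sameElements(source, target)); // true, order ignored
        System.out.println(sameElements(source, Arrays.asList("10.0.0.1"))); // false
    }
}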
@Override public void mouseReleased(MouseEvent e) { synchronized (mouseLock) { mousePressed = false; } if (e.getButton() == MouseEvent.BUTTON1) { for (DrawListener listener : listeners) listener.mouseReleased(userX(e.getX()), userY(e.getY())); } }
void function(MouseEvent e) { synchronized (mouseLock) { mousePressed = false; } if (e.getButton() == MouseEvent.BUTTON1) { for (DrawListener listener : listeners) listener.mouseReleased(userX(e.getX()), userY(e.getY())); } }
/** * This method cannot be called directly. */
This method cannot be called directly
mouseReleased
{ "repo_name": "pratiksanglikar/Data-Structures", "path": "src/ds/pratiksanglikar/lib/Draw.java", "license": "gpl-3.0", "size": 45443 }
[ "java.awt.event.MouseEvent" ]
import java.awt.event.MouseEvent;
import java.awt.event.*;
[ "java.awt" ]
java.awt;
2,080,902
public static Date valueOf (String str) { if (str == null) throw new IllegalArgumentException(); try { java.util.Date d = (java.util.Date) sdf.parseObject(str); if (d == null) throw new IllegalArgumentException(str); else return new Date(d.getTime()); } catch (ParseException e) { throw new IllegalArgumentException(str); } }
static Date function (String str) { if (str == null) throw new IllegalArgumentException(); try { java.util.Date d = (java.util.Date) sdf.parseObject(str); if (d == null) throw new IllegalArgumentException(str); else return new Date(d.getTime()); } catch (ParseException e) { throw new IllegalArgumentException(str); } }
/** * This method returns a new instance of this class by parsing a * date in JDBC format into a Java date. * * @param str The string to parse. * @return The resulting <code>java.sql.Date</code> value. */
This method returns a new instance of this class by parsing a date in JDBC format into a Java date
valueOf
{ "repo_name": "ivmai/JCGO", "path": "goclsp/clsp_fix/java/sql/Date.java", "license": "gpl-2.0", "size": 5402 }
[ "java.text.ParseException" ]
import java.text.ParseException;
import java.text.*;
[ "java.text" ]
java.text;
1,603,956
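A small, self-contained illustration of the parse-then-wrap step in the valueOf record above. The SimpleDateFormat with the yyyy-MM-dd pattern is an assumption standing in for the class-level sdf formatter, which is not shown in the record.

import java.text.ParseException;
import java.text.SimpleDateFormat;

public class SqlDateParseDemo {
    public static void main(String[] args) throws ParseException {
        // JDBC date escape format is yyyy-MM-dd; parse to java.util.Date,
        // then wrap the epoch millis in java.sql.Date as the record does.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        java.util.Date parsed = sdf.parse("2024-01-31");
        java.sql.Date sqlDate = new java.sql.Date(parsed.getTime());
        System.out.println(sqlDate); // 2024-01-31
    }
}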
public void addLongHoldView(@IdRes int longHoldViewId, boolean receiveMultipleEvents) { longHoldViews.add(new LongHoldView(peekView.findViewById(longHoldViewId), receiveMultipleEvents)); }
void function(@IdRes int longHoldViewId, boolean receiveMultipleEvents) { longHoldViews.add(new LongHoldView(peekView.findViewById(longHoldViewId), receiveMultipleEvents)); }
/** * Specify id of view WITHIN the peek layout, this view will trigger on long hold events. * You can add multiple on long hold views * * @param longHoldViewId id of the view to receive on long hold events * @return */
Specify id of view WITHIN the peek layout, this view will trigger on long hold events. You can add multiple on long hold views
addLongHoldView
{ "repo_name": "anuj7sharma/SampleBoard", "path": "peeknpop/src/main/java/com/peekandpop/shalskar/peekandpop/PeekAndPop.java", "license": "mit", "size": 31780 }
[ "android.support.annotation.IdRes", "com.peekandpop.shalskar.peekandpop.model.LongHoldView" ]
import android.support.annotation.IdRes; import com.peekandpop.shalskar.peekandpop.model.LongHoldView;
import android.support.annotation.*; import com.peekandpop.shalskar.peekandpop.model.*;
[ "android.support", "com.peekandpop.shalskar" ]
android.support; com.peekandpop.shalskar;
809,228
public List<String> resources() { return this.resources; }
List<String> function() { return this.resources; }
/** * Get indicates resources which were responsible for the error. * * @return the resources value */
Get indicates resources which were responsible for the error
resources
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/loganalytics/microsoft-azure-loganalytics/src/main/java/com/microsoft/azure/loganalytics/models/ErrorDetail.java", "license": "mit", "size": 4163 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
185,127
static void checkSnapshot( INode target, List<INodeDirectory> snapshottableDirs) throws SnapshotException { if (target.isDirectory()) { INodeDirectory targetDir = target.asDirectory(); DirectorySnapshottableFeature sf = targetDir .getDirectorySnapshottableFeature(); if (sf != null) { if (sf.getNumSnapshots() > 0) { String fullPath = targetDir.getFullPathName(); throw new SnapshotException("The directory " + fullPath + " cannot be deleted since " + fullPath + " is snapshottable and already has snapshots"); } else { if (snapshottableDirs != null) { snapshottableDirs.add(targetDir); } } } for (INode child : targetDir.getChildrenList(Snapshot.CURRENT_STATE_ID)) { checkSnapshot(child, snapshottableDirs); } } }
static void checkSnapshot( INode target, List<INodeDirectory> snapshottableDirs) throws SnapshotException { if (target.isDirectory()) { INodeDirectory targetDir = target.asDirectory(); DirectorySnapshottableFeature sf = targetDir .getDirectorySnapshottableFeature(); if (sf != null) { if (sf.getNumSnapshots() > 0) { String fullPath = targetDir.getFullPathName(); throw new SnapshotException(STR + fullPath + STR + fullPath + STR); } else { if (snapshottableDirs != null) { snapshottableDirs.add(targetDir); } } } for (INode child : targetDir.getChildrenList(Snapshot.CURRENT_STATE_ID)) { checkSnapshot(child, snapshottableDirs); } } }
/** * Check if the given INode (or one of its descendants) is snapshottable and * already has snapshots. * * @param target The given INode * @param snapshottableDirs The list of directories that are snapshottable * but do not have snapshots yet */
Check if the given INode (or one of its descendants) is snapshottable and already has snapshots
checkSnapshot
{ "repo_name": "Bizyroth/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSnapshotOp.java", "license": "apache-2.0", "size": 8687 }
[ "java.util.List", "org.apache.hadoop.hdfs.protocol.SnapshotException", "org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature", "org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot" ]
import java.util.List; import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import java.util.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.namenode.snapshot.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,349,037
private void getPMDPublisher() { PmdPublisher pmd = new PmdPublisher(null, null, "low", null, false, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, false, false, false, false, "**/pmd.xml"); try { currentProj.getPublishersList().add(pmd); } catch (IOException ex) { Logger.getLogger(BasicConfigurationAction.class.getName()).log(Level.SEVERE, null, ex); } }
void function() { PmdPublisher pmd = new PmdPublisher(null, null, "low", null, false, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, false, false, false, false, STR); try { currentProj.getPublishersList().add(pmd); } catch (IOException ex) { Logger.getLogger(BasicConfigurationAction.class.getName()).log(Level.SEVERE, null, ex); } }
/** * getPMDPublisher() method adds the PMD Publisher Plugin to the job publisher list. */
getPMDPublisher() method adds the PMD Publisher Plugin to the job publisher list
getPMDPublisher
{ "repo_name": "eBay/EasyConfig", "path": "src/main/java/com/mycompany/simpleprojectconfiguration/BasicConfigurationAction.java", "license": "mit", "size": 33795 }
[ "hudson.plugins.pmd.PmdPublisher", "java.io.IOException", "java.util.logging.Level", "java.util.logging.Logger" ]
import hudson.plugins.pmd.PmdPublisher; import java.io.IOException; import java.util.logging.Level; import java.util.logging.Logger;
import hudson.plugins.pmd.*; import java.io.*; import java.util.logging.*;
[ "hudson.plugins.pmd", "java.io", "java.util" ]
hudson.plugins.pmd; java.io; java.util;
432,382
public static boolean isNumber(String str) { if (isNullOrEmpty(str)) // empty string and null is not a number return false; final NumberFormat formatter = NumberFormat.getInstance(); final ParsePosition pos = new ParsePosition(0); formatter.parse(str, pos); return str.length() == pos.getIndex(); }
static boolean function(String str) { if (isNullOrEmpty(str)) return false; final NumberFormat formatter = NumberFormat.getInstance(); final ParsePosition pos = new ParsePosition(0); formatter.parse(str, pos); return str.length() == pos.getIndex(); }
/** * Checks whether the provided string is a number. * @param str The string. * @return True if the provided string is a number, false otherwise. */
Checks whether the provided string is a number
isNumber
{ "repo_name": "FTSRG/massif", "path": "maven/hu.bme.mit.massif.oslc.adaptor/src/main/java/hu/bme/mit/massif/oslc/adaptor/util/Utils.java", "license": "epl-1.0", "size": 1621 }
[ "java.text.NumberFormat", "java.text.ParsePosition" ]
import java.text.NumberFormat; import java.text.ParsePosition;
import java.text.*;
[ "java.text" ]
java.text;
845,068
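A runnable version of the full-consumption parse check from the isNumber record above. The null/empty guard is inlined here because the record's isNullOrEmpty helper is not shown, and since NumberFormat.getInstance() is locale-sensitive, locale-specific grouping characters may also be accepted.

import java.text.NumberFormat;
import java.text.ParsePosition;

public class NumberCheckDemo {
    // Accept the string only if the formatter consumed every character,
    // mirroring the length-vs-index comparison in the record above.
    static boolean isNumber(String str) {
        if (str == null || str.isEmpty()) {
            return false;
        }
        NumberFormat formatter = NumberFormat.getInstance();
        ParsePosition pos = new ParsePosition(0);
        formatter.parse(str, pos);
        return str.length() == pos.getIndex();
    }

    public static void main(String[] args) {
        System.out.println(isNumber("1234"));  // true
        System.out.println(isNumber("12a34")); // false: parsing stops at 'a'
        System.out.println(isNumber(""));      // false
    }
}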
public void testGetShort() { ByteBuffer bb1 = ByteBuffer.allocate(1); bb1.put((byte)1); ByteBuffer bb2 = ByteBuffer.allocate(1); bb2.put((byte)0); ByteBuffer bb3 = ByteBuffer.allocate(1); bb3.put((byte)2); ByteBuffer bb4 = ByteBuffer.allocate(1); bb4.put((byte)3); MultiByteBuff mbb = new MultiByteBuff(bb1, bb2, bb3, bb4); assertEquals(256, mbb.getShortAfterPosition(0)); assertEquals(2, mbb.getShortAfterPosition(1)); assertEquals(515, mbb.getShortAfterPosition(2)); }
void function() { ByteBuffer bb1 = ByteBuffer.allocate(1); bb1.put((byte)1); ByteBuffer bb2 = ByteBuffer.allocate(1); bb2.put((byte)0); ByteBuffer bb3 = ByteBuffer.allocate(1); bb3.put((byte)2); ByteBuffer bb4 = ByteBuffer.allocate(1); bb4.put((byte)3); MultiByteBuff mbb = new MultiByteBuff(bb1, bb2, bb3, bb4); assertEquals(256, mbb.getShortAfterPosition(0)); assertEquals(2, mbb.getShortAfterPosition(1)); assertEquals(515, mbb.getShortAfterPosition(2)); }
/** * Test right answer though we span many sub-buffers. */
Test right answer though we span many sub-buffers
testGetShort
{ "repo_name": "ultratendency/hbase", "path": "hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java", "license": "apache-2.0", "size": 14820 }
[ "java.nio.ByteBuffer", "org.junit.Assert" ]
import java.nio.ByteBuffer; import org.junit.Assert;
import java.nio.*; import org.junit.*;
[ "java.nio", "org.junit" ]
java.nio; org.junit;
2,206,711
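The expected values asserted in the test record above follow from big-endian assembly of two consecutive bytes into a 16-bit value, even when the bytes live in different sub-buffers. A tiny arithmetic sketch, with illustrative class and method names:

public class SpanningShortDemo {
    // Big-endian combination of two bytes: high byte shifted left 8 bits,
    // low byte in the bottom 8 bits.
    static int toShort(int hi, int lo) {
        return ((hi & 0xFF) << 8) | (lo & 0xFF);
    }

    public static void main(String[] args) {
        System.out.println(toShort(1, 0)); // 256, bytes at positions 0 and 1
        System.out.println(toShort(0, 2)); // 2, bytes at positions 1 and 2
        System.out.println(toShort(2, 3)); // 515, bytes at positions 2 and 3
    }
}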
void setToMicroschemaVersion(HibMicroschemaVersion toVersion);
void setToMicroschemaVersion(HibMicroschemaVersion toVersion);
/** * Set the microschema version reference. * * @param toVersion */
Set the microschema version reference
setToMicroschemaVersion
{ "repo_name": "gentics/mesh", "path": "mdm/api/src/main/java/com/gentics/mesh/core/data/job/HibJob.java", "license": "apache-2.0", "size": 7321 }
[ "com.gentics.mesh.core.data.schema.HibMicroschemaVersion" ]
import com.gentics.mesh.core.data.schema.HibMicroschemaVersion;
import com.gentics.mesh.core.data.schema.*;
[ "com.gentics.mesh" ]
com.gentics.mesh;
2,580,795
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE", justification="cluster Id znode read would give us correct response") private void initializeZooKeeper() throws IOException, InterruptedException { // Nothing to do in here if no Master in the mix. if (this.masterless) return; // Create the master address tracker, register with zk, and start it. Then // block until a master is available. No point in starting up if no master // running. blockAndCheckIfStopped(this.masterAddressTracker); // Wait on cluster being up. Master will set this flag up in zookeeper // when ready. blockAndCheckIfStopped(this.clusterStatusTracker); doLatch(this.initLatch); // Retrieve clusterId // Since cluster status is now up // ID should have already been set by HMaster try { clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper); if (clusterId == null) { this.abort("Cluster ID has not been set"); } LOG.info("ClusterId : "+clusterId); } catch (KeeperException e) { this.abort("Failed to retrieve Cluster ID",e); } // In case colocated master, wait here till it's active. // So backup masters won't start as regionservers. // This is to avoid showing backup masters as regionservers // in master web UI, or assigning any region to them. waitForMasterActive(); if (isStopped() || isAborted()) { return; // No need for further initialization } // watch for snapshots and other procedures try { rspmHost = new RegionServerProcedureManagerHost(); rspmHost.loadProcedures(conf); rspmHost.initialize(this); } catch (KeeperException e) { this.abort("Failed to reach coordination cluster when creating procedure handler.", e); } // register watcher for recovering regions this.recoveringRegionWatcher = new RecoveringRegionWatcher(this.zooKeeper, this); }
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value=STR, justification=STR) void function() throws IOException, InterruptedException { if (this.masterless) return; blockAndCheckIfStopped(this.masterAddressTracker); blockAndCheckIfStopped(this.clusterStatusTracker); doLatch(this.initLatch); try { clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper); if (clusterId == null) { this.abort(STR); } LOG.info(STR+clusterId); } catch (KeeperException e) { this.abort(STR,e); } waitForMasterActive(); if (isStopped() || isAborted()) { return; } try { rspmHost = new RegionServerProcedureManagerHost(); rspmHost.loadProcedures(conf); rspmHost.initialize(this); } catch (KeeperException e) { this.abort(STR, e); } this.recoveringRegionWatcher = new RecoveringRegionWatcher(this.zooKeeper, this); }
/** * Bring up connection to zk ensemble and then wait until a master for this * cluster and then after that, wait until cluster 'up' flag has been set. * This is the order in which master does things. * Finally open long-living server short-circuit connection. * @throws IOException * @throws InterruptedException */
Bring up connection to zk ensemble and then wait until a master for this cluster and then after that, wait until cluster 'up' flag has been set. This is the order in which master does things. Finally open long-living server short-circuit connection
initializeZooKeeper
{ "repo_name": "vincentpoon/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java", "license": "apache-2.0", "size": 147579 }
[ "java.io.IOException", "org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost", "org.apache.hadoop.hbase.zookeeper.RecoveringRegionWatcher", "org.apache.hadoop.hbase.zookeeper.ZKClusterId", "org.apache.zookeeper.KeeperException" ]
import java.io.IOException; import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost; import org.apache.hadoop.hbase.zookeeper.RecoveringRegionWatcher; import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.zookeeper.KeeperException;
import java.io.*; import org.apache.hadoop.hbase.procedure.*; import org.apache.hadoop.hbase.zookeeper.*; import org.apache.zookeeper.*;
[ "java.io", "org.apache.hadoop", "org.apache.zookeeper" ]
java.io; org.apache.hadoop; org.apache.zookeeper;
1,470,553
public DbGroupDao getDbGroupDao() { return getDao(DbGroupDao.class); }
DbGroupDao function() { return getDao(DbGroupDao.class); }
/** * Retrieves the singleton instance of {@link DbGroupDao}. * * @return the dao */
Retrieves the singleton instance of <code>DbGroupDao</code>
getDbGroupDao
{ "repo_name": "jtux270/translate", "path": "ovirt/3.6_source/backend/manager/modules/dal/src/main/java/org/ovirt/engine/core/dal/dbbroker/DbFacade.java", "license": "gpl-3.0", "size": 42484 }
[ "org.ovirt.engine.core.dao.DbGroupDao" ]
import org.ovirt.engine.core.dao.DbGroupDao;
import org.ovirt.engine.core.dao.*;
[ "org.ovirt.engine" ]
org.ovirt.engine;
2,876,165
public void addToCompressorQueue(BIN bin, Key deletedKey, boolean doWakeup) { if (inCompressor != null) { inCompressor.addBinKeyToQueue(bin, deletedKey, doWakeup); } }
void function(BIN bin, Key deletedKey, boolean doWakeup) { if (inCompressor != null) { inCompressor.addBinKeyToQueue(bin, deletedKey, doWakeup); } }
/** * Tells the asynchronous IN compressor thread about a BIN with a deleted * entry. */
Tells the asynchronous IN compressor thread about a BIN with a deleted entry
addToCompressorQueue
{ "repo_name": "bjorndm/prebake", "path": "code/third_party/bdb/src/com/sleepycat/je/dbi/EnvironmentImpl.java", "license": "apache-2.0", "size": 87347 }
[ "com.sleepycat.je.tree.Key" ]
import com.sleepycat.je.tree.Key;
import com.sleepycat.je.tree.*;
[ "com.sleepycat.je" ]
com.sleepycat.je;
1,742,140
public void test_bug630syncToLatestScrewedWindowsPath() throws CommandStoppedException, BuildException, IOException, AgentFailureException { errorManager.clearAllActiveErrors(); TestHelper.setSourceControlProperty(TEST_BUILD_ID, SourceControlSetting.P4_DEPOT_PATH, "//test/abbdleclient/..."); perforce.reloadConfiguration(); perforce.checkoutLatest(); TestHelper.assertCheckoutDirNotEmpty(agent); assertEquals(0, errorManager.errorCount()); }
void function() throws CommandStoppedException, BuildException, IOException, AgentFailureException { errorManager.clearAllActiveErrors(); TestHelper.setSourceControlProperty(TEST_BUILD_ID, SourceControlSetting.P4_DEPOT_PATH, STR); perforce.reloadConfiguration(); perforce.checkoutLatest(); TestHelper.assertCheckoutDirNotEmpty(agent); assertEquals(0, errorManager.errorCount()); }
/** * Demonstrates that nothing happens when mixed case directories * are checked out under Windows. */
Demonstrates that nothing happens when mixed case directories are checked out under Windows
test_bug630syncToLatestScrewedWindowsPath
{ "repo_name": "simeshev/parabuild-ci", "path": "test/src/org/parabuild/ci/versioncontrol/perforce/SSTestP4SourceControl.java", "license": "lgpl-3.0", "size": 29667 }
[ "java.io.IOException", "org.parabuild.ci.TestHelper", "org.parabuild.ci.build.AgentFailureException", "org.parabuild.ci.common.BuildException", "org.parabuild.ci.common.CommandStoppedException", "org.parabuild.ci.object.SourceControlSetting" ]
import java.io.IOException; import org.parabuild.ci.TestHelper; import org.parabuild.ci.build.AgentFailureException; import org.parabuild.ci.common.BuildException; import org.parabuild.ci.common.CommandStoppedException; import org.parabuild.ci.object.SourceControlSetting;
import java.io.*; import org.parabuild.ci.*; import org.parabuild.ci.build.*; import org.parabuild.ci.common.*; import org.parabuild.ci.object.*;
[ "java.io", "org.parabuild.ci" ]
java.io; org.parabuild.ci;
2,760,579
@Override public String getBundleSpec(String instanceId) throws RemoteException;
String function(String instanceId) throws RemoteException;
/** * Return the specification of the instance, identified by the given ID. * @param instanceId The instance ID. * @return The specification for the instance. * @throws RemoteException If an error occurs while connecting to the instance over * RMI. */
Return the specification of the instance, identified by the given ID
getBundleSpec
{ "repo_name": "statsbiblioteket/summa", "path": "Core/src/main/java/dk/statsbiblioteket/summa/control/rmi/ClientRMIConnection.java", "license": "apache-2.0", "size": 5340 }
[ "java.rmi.RemoteException" ]
import java.rmi.RemoteException;
import java.rmi.*;
[ "java.rmi" ]
java.rmi;
12,222
public TerminologySet getCheckedTerminologySet(String uri) throws UnknownURIException { //My new test! if(!terminologySetExist(uri)) throw new UnknownURIException(uri); TerminologySetTDBImpl result=new TerminologySetTDBImpl(uri, this); return result; }
TerminologySet function(String uri) throws UnknownURIException { if(!terminologySetExist(uri)) throw new UnknownURIException(uri); TerminologySetTDBImpl result=new TerminologySetTDBImpl(uri, this); return result; }
/** * Returns a TerminologySet already known for a given URI. * Throws an exception if no such TerminologySet has been defined. */
Returns a TerminologySet already known for a given URI. Throws an exception if no such TerminologySet has been defined
getCheckedTerminologySet
{ "repo_name": "metarelate/terminology-server", "path": "moduleTmCore/src/main/java/net/metarelate/terminology/coreModel/TerminologyFactoryTDBImpl.java", "license": "gpl-3.0", "size": 12092 }
[ "net.metarelate.terminology.exceptions.UnknownURIException" ]
import net.metarelate.terminology.exceptions.UnknownURIException;
import net.metarelate.terminology.exceptions.*;
[ "net.metarelate.terminology" ]
net.metarelate.terminology;
2,235,672
HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks, List<Token<BlockTokenIdentifier>> tokens) throws IOException;
HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks, List<Token<BlockTokenIdentifier>> tokens) throws IOException;
/** * Retrieves volume location information about a list of blocks on a datanode. * This is in the form of an opaque {@link VolumeId} for each configured * data directory, which is not guaranteed to be the same across DN restarts. * * @param blocks * list of blocks on the local datanode * @param tokens * block access tokens corresponding to the requested blocks * @return an HdfsBlocksMetadata that associates {@link ExtendedBlock}s with * data directories * @throws IOException * if datanode is unreachable, or replica is not found on datanode */
Retrieves volume location information about a list of blocks on a datanode. This is in the form of an opaque <code>VolumeId</code> for each configured data directory, which is not guaranteed to be the same across DN restarts
getHdfsBlocksMetadata
{ "repo_name": "ict-carch/hadoop-plus", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java", "license": "apache-2.0", "size": 5066 }
[ "java.io.IOException", "java.util.List", "org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier", "org.apache.hadoop.security.token.Token" ]
import java.io.IOException; import java.util.List; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.security.token.Token;
import java.io.*; import java.util.*; import org.apache.hadoop.hdfs.security.token.block.*; import org.apache.hadoop.security.token.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
823,681
public DeliverSummary[] getUserDeliverSummary(String semester,String subject,int activityIndex) throws ExecPelpException,InvalidEngineException,AuthorizationException;
DeliverSummary[] function(String semester,String subject,int activityIndex) throws ExecPelpException,InvalidEngineException,AuthorizationException;
/** * Get summarized information for all delivers for the current user in a given activity * @param semester Semester code * @param subject Subject code * @param activityIndex Activity Index * @return Array of Object with summary information of the delivers * @throws ExecPelpException if some error occurs during process execution * @throws InvalidEngineException if the engine is not properly initialized * @throws AuthorizationException if user is not a teacher of this classroom */
Get summarized information for all delivers for the current user in a given activity
getUserDeliverSummary
{ "repo_name": "UOC/PeLP", "path": "src/main/java/edu/uoc/pelp/bussines/UOC/UOCPelpBussines.java", "license": "gpl-3.0", "size": 21812 }
[ "edu.uoc.pelp.bussines.exception.AuthorizationException", "edu.uoc.pelp.bussines.exception.InvalidEngineException", "edu.uoc.pelp.bussines.vo.DeliverSummary", "edu.uoc.pelp.exception.ExecPelpException" ]
import edu.uoc.pelp.bussines.exception.AuthorizationException; import edu.uoc.pelp.bussines.exception.InvalidEngineException; import edu.uoc.pelp.bussines.vo.DeliverSummary; import edu.uoc.pelp.exception.ExecPelpException;
import edu.uoc.pelp.bussines.exception.*; import edu.uoc.pelp.bussines.vo.*; import edu.uoc.pelp.exception.*;
[ "edu.uoc.pelp" ]
edu.uoc.pelp;
2,011,133
public static void setLabelAndMnemonic(AbstractButton btn, String inLabel) { setLabelAndMnemonic(new ButtonHolder(btn), inLabel); }
static void function(AbstractButton btn, String inLabel) { setLabelAndMnemonic(new ButtonHolder(btn), inLabel); }
/** * Ampersand indicates that the character after it is a mnemo, unless the * character is a space. In "Find & Replace", ampersand does not label * mnemo, while in "&About", mnemo is "Alt + A". */
Ampersand indicates that the character after it is a mnemo, unless the character is a space. In "Find & Replace", ampersand does not label mnemo, while in "&About", mnemo is "Alt + A"
setLabelAndMnemonic
{ "repo_name": "iwabuchiken/freemind_1.0.0_20140624_214725", "path": "freemind/main/Tools.java", "license": "gpl-2.0", "size": 56843 }
[ "javax.swing.AbstractButton" ]
import javax.swing.AbstractButton;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
1,802,158
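A stand-alone sketch of the ampersand-mnemonic rule documented in the Tools.setLabelAndMnemonic record above, using only plain Swing; the helper name applyLabelAndMnemonic and its parsing logic are illustrative assumptions, not FreeMind's actual implementation (which delegates to a ButtonHolder).

import javax.swing.AbstractButton;
import javax.swing.JButton;

class MnemonicSketch {
    // Hypothetical helper mirroring the documented rule: an ampersand marks the
    // following character as the mnemonic unless that character is a space.
    static void applyLabelAndMnemonic(AbstractButton btn, String label) {
        int idx = label.indexOf('&');
        if (idx >= 0 && idx + 1 < label.length() && label.charAt(idx + 1) != ' ') {
            btn.setText(label.substring(0, idx) + label.substring(idx + 1));
            btn.setMnemonic(label.charAt(idx + 1)); // "&About" -> text "About", mnemonic Alt+A
        } else {
            btn.setText(label); // "Find & Replace" keeps its text and gets no mnemonic
        }
    }

    public static void main(String[] args) {
        AbstractButton about = new JButton();
        applyLabelAndMnemonic(about, "&About");
        AbstractButton find = new JButton();
        applyLabelAndMnemonic(find, "Find & Replace");
        System.out.println(about.getText() + " / " + find.getText()); // About / Find & Replace
    }
}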
void dir(Path path) throws HgCallbackTargetException, HgRuntimeException;
void dir(Path path) throws HgCallbackTargetException, HgRuntimeException;
/** * If walker is configured to spit out directories, indicates files from specified directories are about to be reported. * Comes prior to any files from this directory and subdirectories * * @param path directory known in the manifest * @throws HgCallbackTargetException wrapper for any exception user code may produce * @throws HgRuntimeException propagates library issues. <em>Runtime exception</em> */
If walker is configured to spit out directories, indicates files from specified directories are about to be reported. Comes prior to any files from this directory and subdirectories
dir
{ "repo_name": "CharlieKuharski/hg4j", "path": "src/org/tmatesoft/hg/core/HgManifestHandler.java", "license": "gpl-2.0", "size": 2968 }
[ "org.tmatesoft.hg.repo.HgRuntimeException", "org.tmatesoft.hg.util.Path" ]
import org.tmatesoft.hg.repo.HgRuntimeException; import org.tmatesoft.hg.util.Path;
import org.tmatesoft.hg.repo.*; import org.tmatesoft.hg.util.*;
[ "org.tmatesoft.hg" ]
org.tmatesoft.hg;
2,162,719
LSParser parser = createLSParser(); if (parser == null) { Assert.fail("Unable to create LSParser!"); }
LSParser parser = createLSParser(); if (parser == null) { Assert.fail(STR); }
/** * Equivalence class partitioning * with state, input and output values orientation * for public Document parse(LSInput is), * <br><b>pre-conditions</b>: set filter that REJECTs any CHILD* node, * <br><b>is</b>: xml1 * <br><b>output</b>: XML document with ELEMENT1 and ELEMENT2 only. */
Equivalence class partitioning with state, input and output values orientation for public Document parse(LSInput is), pre-conditions: set filter that REJECTs any CHILD* node, is: xml1 output: XML document with ELEMENT1 and ELEMENT2 only
testfilter0001
{ "repo_name": "lostdj/Jaklin-OpenJDK-JAXP", "path": "test/javax/xml/jaxp/unittest/org/w3c/dom/ls/LSParserTCKTest.java", "license": "gpl-2.0", "size": 20273 }
[ "org.testng.Assert" ]
import org.testng.Assert;
import org.testng.*;
[ "org.testng" ]
org.testng;
1,265,534
int parseSectionCount(String s) { if (s == null || s.length() == 0) { return 0; } return StringArray.split(s).length; }
int parseSectionCount(String s) { if (s == null || s.length() == 0) { return 0; } return StringArray.split(s).length; }
/** * This method tolerates null. It expects a list of section names - basically * a space separated list and returns the number of items there. * @param s The section name list string to parse and count. May be null. * @return Number of sections listed in the string. Will be zero if the * input was null. */
This method tolerates null. It expects a list of section names - basically a space separated list and returns the number of items there
parseSectionCount
{ "repo_name": "otmarjr/jtreg-fork", "path": "dist-with-aspectj/jtreg/lib/javatest/com/sun/javatest/TestResult.java", "license": "gpl-2.0", "size": 100130 }
[ "com.sun.javatest.util.StringArray" ]
import com.sun.javatest.util.StringArray;
import com.sun.javatest.util.*;
[ "com.sun.javatest" ]
com.sun.javatest;
923,555
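A minimal sketch of the null-tolerant, space-separated counting described in the parseSectionCount record above; com.sun.javatest.util.StringArray.split is assumed here to behave like a whitespace split, and the class and method names are hypothetical.

class SectionCountSketch {
    // Null-tolerant count of whitespace-separated section names.
    static int countSections(String s) {
        if (s == null || s.trim().isEmpty()) {
            return 0;
        }
        return s.trim().split("\\s+").length;
    }
    // countSections(null) == 0, countSections("init main end") == 3
}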
public static void saveSettings(){ settingsFilePath = "config.txt"; // Since we don't need to keep the settings from the log file after they have been loaded in we can simply rewrite the file using the values loaded in or set. // As you can imagine, multiple instances of the same config declaration is highly inefficient. It is because of this reason that unlike the log file we don't simply append more information to the config file. try { writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(settingsFilePath), "utf-8")); writer.write("uniqueReplacements=" + uniqueReplacements); writer.newLine(); writer.write("staticReplacement=" + staticReplacement); writer.newLine(); writer.write("enableLogging=" + enableLogging); writeLog("Settings saved to file!"); } catch (IOException e) { writeLog("Error: " + e.getMessage()); } finally { try {writer.close();} catch (Exception e) { writeLog("Error: " + e.getMessage()); } } }
static void function(){ settingsFilePath = STR; try { writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(settingsFilePath), "utf-8")); writer.write(STR + uniqueReplacements); writer.newLine(); writer.write(STR + staticReplacement); writer.newLine(); writer.write(STR + enableLogging); writeLog(STR); } catch (IOException e) { writeLog(STR + e.getMessage()); } finally { try {writer.close();} catch (Exception e) { writeLog(STR + e.getMessage()); } } }
/** * Saves the settings to a file to keep the settings persistent */
Saves the settings to a file to keep the settings persistent
saveSettings
{ "repo_name": "twist3dsoft/You-Cant-Say-That", "path": "Settings.java", "license": "gpl-3.0", "size": 12817 }
[ "java.io.BufferedWriter", "java.io.FileOutputStream", "java.io.IOException", "java.io.OutputStreamWriter" ]
import java.io.BufferedWriter; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter;
import java.io.*;
[ "java.io" ]
java.io;
821,980
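The saveSettings record above writes key=value pairs and closes the writer in a finally block; this is a hedged sketch of the same layout using try-with-resources. The field names are taken from the record, while the class name SettingsWriterSketch is invented for illustration.

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

class SettingsWriterSketch {
    // Same key=value layout as the record above; try-with-resources closes the writer
    // even when a write fails, replacing the manual finally block.
    static void save(String path, boolean uniqueReplacements,
                     boolean staticReplacement, boolean enableLogging) {
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(path), StandardCharsets.UTF_8))) {
            writer.write("uniqueReplacements=" + uniqueReplacements);
            writer.newLine();
            writer.write("staticReplacement=" + staticReplacement);
            writer.newLine();
            writer.write("enableLogging=" + enableLogging);
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
        }
    }
}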
public String toString(boolean sendList) { StringBuilder builder = new StringBuilder(); builder.append("ID: "); builder.append(id); builder.append("\n\tMESSAGES SEND *************************************\n\t\tTotal:\t"); builder.append(messagesSent); builder.append("\n\t\tPct.:\t"); DecimalFormat dcFormat = new DecimalFormat("#0.00"); builder.append(dcFormat.format(getPercentageMessageSend())); builder.append(" %\n\t\tRate:\t"); builder.append(getMessageSendRate()); builder.append(" Msg/ms\n\t\tMESSAGE SEND TO\n"); if (sendList) { for (String key : getSortedMessagesSendToList()) { builder.append("\t\t\t"); builder.append(sendToCountMap.get(key)); builder.append("|"); builder.append(dcFormat.format(getPercentageSendToComponent(key))); builder.append("% -> "); builder.append(key); builder.append("\n"); } } builder.append("\tMESSAGES RECEIVED *********************************\n\t\tTotal:\t"); builder.append(messagesReceived); builder.append("\n\t\tPct.:\t"); builder.append(dcFormat.format(getPercentageMessageReceived())); builder.append(" %\n\t\tRate:\t"); builder.append(getMessageReceiveRate()); builder.append(" Msg/ms\n\t\tAvg target:\t"); builder.append(dcFormat.format(getAveragePercentageAsTarget())); builder.append(" %\n\tSENT MSG DURRATION UNTIL RECEIVE ******************\n\t\tMessages totalTime:\t"); builder.append(totalMessageSentDuration); builder.append("ms\n\t\tMessages avgTime:\t"); builder.append(averageMessageSentDuration); builder.append("ms\n\t\tMessages longest:\t"); builder.append(longestMessageSentDuration); builder.append("ms\n\t\tMessages shortest:\t"); builder.append(shortestMessageSentDuration); builder.append("ms\n\tSTACKTRACE\n"); for (int i = 0; i < stackTrace.length; i++) { for (int x = 0; x < i + 1; x++) { builder.append("\t"); } builder.append(stackTrace[stackTrace.length - 1 - i]); builder.append("\n"); } return builder.toString(); }
String function(boolean sendList) { StringBuilder builder = new StringBuilder(); builder.append(STR); builder.append(id); builder.append(STR); builder.append(messagesSent); builder.append(STR); DecimalFormat dcFormat = new DecimalFormat("#0.00"); builder.append(dcFormat.format(getPercentageMessageSend())); builder.append(STR); builder.append(getMessageSendRate()); builder.append(STR); if (sendList) { for (String key : getSortedMessagesSendToList()) { builder.append(STR); builder.append(sendToCountMap.get(key)); builder.append(" "); builder.append(dcFormat.format(getPercentageSendToComponent(key))); builder.append(STR); builder.append(key); builder.append("\n"); } } builder.append(STR); builder.append(messagesReceived); builder.append(STR); builder.append(dcFormat.format(getPercentageMessageReceived())); builder.append(STR); builder.append(getMessageReceiveRate()); builder.append(STR); builder.append(dcFormat.format(getAveragePercentageAsTarget())); builder.append(STR); builder.append(totalMessageSentDuration); builder.append(STR); builder.append(averageMessageSentDuration); builder.append(STR); builder.append(longestMessageSentDuration); builder.append(STR); builder.append(shortestMessageSentDuration); builder.append(STR); for (int i = 0; i < stackTrace.length; i++) { for (int x = 0; x < i + 1; x++) { builder.append("\t"); } builder.append(stackTrace[stackTrace.length - 1 - i]); builder.append("\n"); } return builder.toString(); }
/** * Creates a string for the component. * * @param sendList * if send list exists * @return string representation of this component */
Creates a string for the component
toString
{ "repo_name": "sopeco/DynamicSpotter-Extensions", "path": "org.spotter.ext.detection.collection/src/org/spotter/ext/detection/blob/Component.java", "license": "apache-2.0", "size": 12292 }
[ "java.text.DecimalFormat" ]
import java.text.DecimalFormat;
import java.text.*;
[ "java.text" ]
java.text;
2,010,723
public ConnectionMonitorDestination destination() { return this.destination; }
ConnectionMonitorDestination function() { return this.destination; }
/** * Gets the destination of the connection monitor. * * @return the destination value */
Gets the destination of the connection monitor
destination
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_04_01/src/main/java/com/microsoft/azure/management/network/v2019_04_01/implementation/ConnectionMonitorInner.java", "license": "mit", "size": 4908 }
[ "com.microsoft.azure.management.network.v2019_04_01.ConnectionMonitorDestination" ]
import com.microsoft.azure.management.network.v2019_04_01.ConnectionMonitorDestination;
import com.microsoft.azure.management.network.v2019_04_01.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
1,212,540
default AdvancedJSR356WebSocketEndpointConsumerBuilder exchangePattern( ExchangePattern exchangePattern) { doSetProperty("exchangePattern", exchangePattern); return this; }
default AdvancedJSR356WebSocketEndpointConsumerBuilder exchangePattern( ExchangePattern exchangePattern) { doSetProperty(STR, exchangePattern); return this; }
/** * Sets the exchange pattern when the consumer creates an exchange. * * The option is a: <code>org.apache.camel.ExchangePattern</code> type. * * Group: consumer (advanced) */
Sets the exchange pattern when the consumer creates an exchange. The option is a: <code>org.apache.camel.ExchangePattern</code> type. Group: consumer (advanced)
exchangePattern
{ "repo_name": "objectiser/camel", "path": "core/camel-endpointdsl/src/main/java/org/apache/camel/builder/endpoint/dsl/JSR356WebSocketEndpointBuilderFactory.java", "license": "apache-2.0", "size": 19351 }
[ "org.apache.camel.ExchangePattern" ]
import org.apache.camel.ExchangePattern;
import org.apache.camel.*;
[ "org.apache.camel" ]
org.apache.camel;
696,692
public void setClearBtnColor(int color) { mClearBtnColor = color; DrawableCompat.setTint(mIconClear, mClearBtnColor); }
void function(int color) { mClearBtnColor = color; DrawableCompat.setTint(mIconClear, mClearBtnColor); }
/** * Sets the clear button's color. * * @param color the color to be applied to the * clear button. */
Sets the clear button's color
setClearBtnColor
{ "repo_name": "btcontract/lnwallet", "path": "app/src/main/java/com/arlib/floatingsearchview/FloatingSearchView.java", "license": "apache-2.0", "size": 31407 }
[ "android.support.v4.graphics.drawable.DrawableCompat" ]
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v4.graphics.drawable.*;
[ "android.support" ]
android.support;
403,631
public void add(WXDomObject child, int index) { if (child == null || index < -1 || sDestroy.get()) { return; } if (mDomChildren == null) { mDomChildren = new ArrayList<>(); } int count = mDomChildren.size(); index = index >= count ? -1 : index; if (index == -1) { mDomChildren.add(child); super.addChildAt(child, super.getChildCount()); } else { mDomChildren.add(index, child); super.addChildAt(child, index); } child.parent = this; }
void function(WXDomObject child, int index) { if (child == null || index < -1 || sDestroy.get()) { return; } if (mDomChildren == null) { mDomChildren = new ArrayList<>(); } int count = mDomChildren.size(); index = index >= count ? -1 : index; if (index == -1) { mDomChildren.add(child); super.addChildAt(child, super.getChildCount()); } else { mDomChildren.add(index, child); super.addChildAt(child, index); } child.parent = this; }
/** * Add the given WXDomObject as this object's child at specified index. * @param child the child to be added * @param index the index of child to be added. If the index is -1, the child will be added * as the last child of current dom object. */
Add the given WXDomObject as this object's child at specified index
add
{ "repo_name": "weexext/ucar-weex-core", "path": "platforms/android/weex-sdk16/src/main/java/com/taobao/weex/dom/WXDomObject.java", "license": "apache-2.0", "size": 20361 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
1,471,739
protected void sequence_S_Experiment(ISerializationContext context, S_Experiment semanticObject) { genericSequencer.createSequence(context, semanticObject); }
void function(ISerializationContext context, S_Experiment semanticObject) { genericSequencer.createSequence(context, semanticObject); }
/** * Contexts: * S_Section returns S_Experiment * S_Experiment returns S_Experiment * GamlDefinition returns S_Experiment * VarDefinition returns S_Experiment * * Constraint: * (key=_ExperimentKey firstFacet='name:'? (name=Valid_ID | name=STRING) facets+=Facet* block=Block?) */
Contexts: S_Section returns S_Experiment S_Experiment returns S_Experiment GamlDefinition returns S_Experiment VarDefinition returns S_Experiment Constraint: (key=_ExperimentKey firstFacet='name:'? (name=Valid_ID | name=STRING) facets+=Facet* block=Block?)
sequence_S_Experiment
{ "repo_name": "gama-platform/gama", "path": "msi.gama.lang.gaml/src-gen/msi/gama/lang/gaml/serializer/AbstractGamlSemanticSequencer.java", "license": "gpl-3.0", "size": 77218 }
[ "org.eclipse.xtext.serializer.ISerializationContext" ]
import org.eclipse.xtext.serializer.ISerializationContext;
import org.eclipse.xtext.serializer.*;
[ "org.eclipse.xtext" ]
org.eclipse.xtext;
126,374
public void assertNotInLayoutOrScroll(String message) { if (mRecyclerView != null) { mRecyclerView.assertNotInLayoutOrScroll(message); } } /** * Defines whether the layout should be measured by the RecyclerView or the LayoutManager * wants to handle the layout measurements itself. * <p> * This method is usually called by the LayoutManager with value {@code true} if it wants * to support WRAP_CONTENT. If you are using a public LayoutManager but want to customize * the measurement logic, you can call this method with {@code false} and override * {@link LayoutManager#onMeasure(int, int)} to implement your custom measurement logic. * <p> * AutoMeasure is a convenience mechanism for LayoutManagers to easily wrap their content or * handle various specs provided by the RecyclerView's parent. * It works by calling {@link LayoutManager#onLayoutChildren(Recycler, State)} during an * {@link RecyclerView#onMeasure(int, int)} call, then calculating desired dimensions based * on children's positions. It does this while supporting all existing animation * capabilities of the RecyclerView. * <p> * AutoMeasure works as follows: * <ol> * <li>LayoutManager should call {@code setAutoMeasureEnabled(true)} to enable it. All of * the framework LayoutManagers use {@code auto-measure}.</li> * <li>When {@link RecyclerView#onMeasure(int, int)} is called, if the provided specs are * exact, RecyclerView will only call LayoutManager's {@code onMeasure} and return without * doing any layout calculation.</li> * <li>If one of the layout specs is not {@code EXACT}, the RecyclerView will start the * layout process in {@code onMeasure} call. It will process all pending Adapter updates and * decide whether to run a predictive layout or not. If it decides to do so, it will first * call {@link #onLayoutChildren(Recycler, State)} with {@link State#isPreLayout()} set to * {@code true}. At this stage, {@link #getWidth()} and {@link #getHeight()} will still * return the width and height of the RecyclerView as of the last layout calculation. * <p> * After handling the predictive case, RecyclerView will call * {@link #onLayoutChildren(Recycler, State)} with {@link State#isMeasuring()} set to * {@code true} and {@link State#isPreLayout()} set to {@code false}. The LayoutManager can * access the measurement specs via {@link #getHeight()}, {@link #getHeightMode()}, * {@link #getWidth()} and {@link #getWidthMode()}.</li> * <li>After the layout calculation, RecyclerView sets the measured width & height by * calculating the bounding box for the children (+ RecyclerView's padding). The * LayoutManagers can override {@link #setMeasuredDimension(Rect, int, int)} to choose * different values. For instance, GridLayoutManager overrides this value to handle the case * where if it is vertical and has 3 columns but only 2 items, it should still measure its * width to fit 3 items, not 2.</li> * <li>Any following on measure call to the RecyclerView will run * {@link #onLayoutChildren(Recycler, State)} with {@link State#isMeasuring()} set to * {@code true} and {@link State#isPreLayout()} set to {@code false}. RecyclerView will * take care of which views are actually added / removed / moved / changed for animations so * that the LayoutManager should not worry about them and handle each * {@link #onLayoutChildren(Recycler, State)} call as if it is the last one. 
* </li> * <li>When measure is complete and RecyclerView's * {@link #onLayout(boolean, int, int, int, int)} method is called, RecyclerView checks * whether it already did layout calculations during the measure pass and if so, it re-uses * that information. It may still decide to call {@link #onLayoutChildren(Recycler, State)}
void function(String message) { if (mRecyclerView != null) { mRecyclerView.assertNotInLayoutOrScroll(message); } } /** * Defines whether the layout should be measured by the RecyclerView or the LayoutManager * wants to handle the layout measurements itself. * <p> * This method is usually called by the LayoutManager with value {@code true} if it wants * to support WRAP_CONTENT. If you are using a public LayoutManager but want to customize * the measurement logic, you can call this method with {@code false} and override * {@link LayoutManager#onMeasure(int, int)} to implement your custom measurement logic. * <p> * AutoMeasure is a convenience mechanism for LayoutManagers to easily wrap their content or * handle various specs provided by the RecyclerView's parent. * It works by calling {@link LayoutManager#onLayoutChildren(Recycler, State)} during an * {@link RecyclerView#onMeasure(int, int)} call, then calculating desired dimensions based * on children's positions. It does this while supporting all existing animation * capabilities of the RecyclerView. * <p> * AutoMeasure works as follows: * <ol> * <li>LayoutManager should call {@code setAutoMeasureEnabled(true)} to enable it. All of * the framework LayoutManagers use {@code auto-measure}.</li> * <li>When {@link RecyclerView#onMeasure(int, int)} is called, if the provided specs are * exact, RecyclerView will only call LayoutManager's {@code onMeasure} and return without * doing any layout calculation.</li> * <li>If one of the layout specs is not {@code EXACT}, the RecyclerView will start the * layout process in {@code onMeasure} call. It will process all pending Adapter updates and * decide whether to run a predictive layout or not. If it decides to do so, it will first * call {@link #onLayoutChildren(Recycler, State)} with {@link State#isPreLayout()} set to * {@code true}. At this stage, {@link #getWidth()} and {@link #getHeight()} will still * return the width and height of the RecyclerView as of the last layout calculation. * <p> * After handling the predictive case, RecyclerView will call * {@link #onLayoutChildren(Recycler, State)} with {@link State#isMeasuring()} set to * {@code true} and {@link State#isPreLayout()} set to {@code false}. The LayoutManager can * access the measurement specs via {@link #getHeight()}, {@link #getHeightMode()}, * {@link #getWidth()} and {@link #getWidthMode()}.</li> * <li>After the layout calculation, RecyclerView sets the measured width & height by * calculating the bounding box for the children (+ RecyclerView's padding). The * LayoutManagers can override {@link #setMeasuredDimension(Rect, int, int)} to choose * different values. For instance, GridLayoutManager overrides this value to handle the case * where if it is vertical and has 3 columns but only 2 items, it should still measure its * width to fit 3 items, not 2.</li> * <li>Any following on measure call to the RecyclerView will run * {@link #onLayoutChildren(Recycler, State)} with {@link State#isMeasuring()} set to * {@code true} and {@link State#isPreLayout()} set to {@code false}. RecyclerView will * take care of which views are actually added / removed / moved / changed for animations so * that the LayoutManager should not worry about them and handle each * {@link #onLayoutChildren(Recycler, State)} call as if it is the last one. 
* </li> * <li>When measure is complete and RecyclerView's * {@link #onLayout(boolean, int, int, int, int)} method is called, RecyclerView checks * whether it already did layout calculations during the measure pass and if so, it re-uses * that information. It may still decide to call {@link #onLayoutChildren(Recycler, State)}
/** * Checks if RecyclerView is in the middle of a layout or scroll and throws an * {@link IllegalStateException} if it <b>is</b>. * * @param message The message for the exception. Can be null. * @see #assertInLayoutOrScroll(String) */
Checks if RecyclerView is in the middle of a layout or scroll and throws an <code>IllegalStateException</code> if it is
assertNotInLayoutOrScroll
{ "repo_name": "amirlotfi/Nikagram", "path": "app/src/main/java/ir/nikagram/messenger/support/widget/RecyclerView.java", "license": "gpl-2.0", "size": 482314 }
[ "android.graphics.Rect" ]
import android.graphics.Rect;
import android.graphics.*;
[ "android.graphics" ]
android.graphics;
23,265
protected void populateDefaults(DocumentDefaultsDefinition defaults, DocumentDefaultsDefinition parentDefaults, Element root) { String lazyInit = root.getAttribute(DEFAULT_LAZY_INIT_ATTRIBUTE); if (DEFAULT_VALUE.equals(lazyInit)) { lazyInit = parentDefaults != null ? parentDefaults.getLazyInit() : FALSE_VALUE; } defaults.setLazyInit(lazyInit); String merge = root.getAttribute(DEFAULT_MERGE_ATTRIBUTE); if (DEFAULT_VALUE.equals(merge)) { merge = parentDefaults != null ? parentDefaults.getMerge() : FALSE_VALUE; } defaults.setMerge(merge); String autowire = root.getAttribute(DEFAULT_AUTOWIRE_ATTRIBUTE); if (DEFAULT_VALUE.equals(autowire)) { autowire = parentDefaults != null ? parentDefaults.getAutowire() : AUTOWIRE_NO_VALUE; } defaults.setAutowire(autowire); // don't fall back to parentDefaults for dependency-check as it's no // longer supported in <beans> as of 3.0. Therefore, no nested <beans> // would ever need to fall back to it. defaults.setDependencyCheck(root.getAttribute(DEFAULT_DEPENDENCY_CHECK_ATTRIBUTE)); if (root.hasAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE)) { defaults.setAutowireCandidates(root.getAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setAutowireCandidates(parentDefaults.getAutowireCandidates()); } if (root.hasAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE)) { defaults.setInitMethod(root.getAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setInitMethod(parentDefaults.getInitMethod()); } if (root.hasAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE)) { defaults.setDestroyMethod(root.getAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setDestroyMethod(parentDefaults.getDestroyMethod()); } defaults.setSource(this.readerContext.extractSource(root)); }
void function(DocumentDefaultsDefinition defaults, DocumentDefaultsDefinition parentDefaults, Element root) { String lazyInit = root.getAttribute(DEFAULT_LAZY_INIT_ATTRIBUTE); if (DEFAULT_VALUE.equals(lazyInit)) { lazyInit = parentDefaults != null ? parentDefaults.getLazyInit() : FALSE_VALUE; } defaults.setLazyInit(lazyInit); String merge = root.getAttribute(DEFAULT_MERGE_ATTRIBUTE); if (DEFAULT_VALUE.equals(merge)) { merge = parentDefaults != null ? parentDefaults.getMerge() : FALSE_VALUE; } defaults.setMerge(merge); String autowire = root.getAttribute(DEFAULT_AUTOWIRE_ATTRIBUTE); if (DEFAULT_VALUE.equals(autowire)) { autowire = parentDefaults != null ? parentDefaults.getAutowire() : AUTOWIRE_NO_VALUE; } defaults.setAutowire(autowire); defaults.setDependencyCheck(root.getAttribute(DEFAULT_DEPENDENCY_CHECK_ATTRIBUTE)); if (root.hasAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE)) { defaults.setAutowireCandidates(root.getAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setAutowireCandidates(parentDefaults.getAutowireCandidates()); } if (root.hasAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE)) { defaults.setInitMethod(root.getAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setInitMethod(parentDefaults.getInitMethod()); } if (root.hasAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE)) { defaults.setDestroyMethod(root.getAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE)); } else if (parentDefaults != null) { defaults.setDestroyMethod(parentDefaults.getDestroyMethod()); } defaults.setSource(this.readerContext.extractSource(root)); }
/** * Populate the given DocumentDefaultsDefinition instance with the default lazy-init, * autowire, dependency check settings, init-method, destroy-method and merge settings. * Support nested 'beans' element use cases by falling back to * <literal>parentDefaults</literal> in case the defaults are not explicitly set * locally. * @param defaults the defaults to populate * @param parentDefaults the parent BeanDefinitionParserDelegate (if any) defaults to fall back to * @param root the root element of the current bean definition document (or nested beans element) */
Populate the given DocumentDefaultsDefinition instance with the default lazy-init, autowire, dependency check settings, init-method, destroy-method and merge settings. Support nested 'beans' element use cases by falling back to parentDefaults in case the defaults are not explicitly set locally
populateDefaults
{ "repo_name": "sunpy1106/SpringBeanLifeCycle", "path": "src/main/java/org/springframework/beans/factory/xml/BeanDefinitionParserDelegate.java", "license": "apache-2.0", "size": 55293 }
[ "org.w3c.dom.Element" ]
import org.w3c.dom.Element;
import org.w3c.dom.*;
[ "org.w3c.dom" ]
org.w3c.dom;
2,194,576
private State convertDeviceValueToOpenHabState(Class<? extends Item> itemType, String data) { State state = UnDefType.UNDEF; try { int index; if (itemType == SwitchItem.class) { index = Integer.parseInt(data); state = index == 0 ? OnOffType.OFF : OnOffType.ON; } else if (itemType == NumberItem.class) { index = Integer.parseInt(data); state = new DecimalType(index); } else if (itemType == DimmerItem.class) { index = Integer.parseInt(data); state = new PercentType(index); } else if (itemType == RollershutterItem.class) { index = Integer.parseInt(data); state = new PercentType(index); } else if (itemType == StringItem.class) { // s = data.substring(3, data.length()); state = new StringType(data); } } catch (Exception e) { logger.debug("Cannot convert value '{}' to data type {}", data, itemType); } // logger.debug("Convert value=" + data + " to openhab type=" // + itemType.toString() + " val=" + state.toString()); return state; }
State function(Class<? extends Item> itemType, String data) { State state = UnDefType.UNDEF; try { int index; if (itemType == SwitchItem.class) { index = Integer.parseInt(data); state = index == 0 ? OnOffType.OFF : OnOffType.ON; } else if (itemType == NumberItem.class) { index = Integer.parseInt(data); state = new DecimalType(index); } else if (itemType == DimmerItem.class) { index = Integer.parseInt(data); state = new PercentType(index); } else if (itemType == RollershutterItem.class) { index = Integer.parseInt(data); state = new PercentType(index); } else if (itemType == StringItem.class) { state = new StringType(data); } } catch (Exception e) { logger.debug(STR, data, itemType); } return state; }
/** * Convert receiver value to OpenHAB state. * * @param itemType * @param data * * @return */
Convert receiver value to OpenHAB state
convertDeviceValueToOpenHabState
{ "repo_name": "lewie/openhab", "path": "bundles/binding/org.openhab.binding.lgtv/src/main/java/org/openhab/binding/lgtv/internal/LgtvBinding.java", "license": "epl-1.0", "size": 21015 }
[ "org.openhab.core.items.Item", "org.openhab.core.library.items.DimmerItem", "org.openhab.core.library.items.NumberItem", "org.openhab.core.library.items.RollershutterItem", "org.openhab.core.library.items.StringItem", "org.openhab.core.library.items.SwitchItem", "org.openhab.core.library.types.DecimalType", "org.openhab.core.library.types.OnOffType", "org.openhab.core.library.types.PercentType", "org.openhab.core.library.types.StringType", "org.openhab.core.types.State", "org.openhab.core.types.UnDefType" ]
import org.openhab.core.items.Item; import org.openhab.core.library.items.DimmerItem; import org.openhab.core.library.items.NumberItem; import org.openhab.core.library.items.RollershutterItem; import org.openhab.core.library.items.StringItem; import org.openhab.core.library.items.SwitchItem; import org.openhab.core.library.types.DecimalType; import org.openhab.core.library.types.OnOffType; import org.openhab.core.library.types.PercentType; import org.openhab.core.library.types.StringType; import org.openhab.core.types.State; import org.openhab.core.types.UnDefType;
import org.openhab.core.items.*; import org.openhab.core.library.items.*; import org.openhab.core.library.types.*; import org.openhab.core.types.*;
[ "org.openhab.core" ]
org.openhab.core;
2,094,481
private void addManagementMetrics(Collection<Metric<?>> result) { try { // Add JVM up time in ms result.add(new Metric<Long>("uptime", ManagementFactory.getRuntimeMXBean().getUptime())); result.add(new Metric<Double>("systemload.average", ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage())); addHeapMetrics(result); addThreadMetrics(result); addClassLoadingMetrics(result); addGarbageCollectionMetrics(result); } catch (NoClassDefFoundError ex) { // Expected on Google App Engine } }
void function(Collection<Metric<?>> result) { try { result.add(new Metric<Long>(STR, ManagementFactory.getRuntimeMXBean().getUptime())); result.add(new Metric<Double>(STR, ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage())); addHeapMetrics(result); addThreadMetrics(result); addClassLoadingMetrics(result); addGarbageCollectionMetrics(result); } catch (NoClassDefFoundError ex) { } }
/** * Add metrics from ManagementFactory if possible. Note that ManagementFactory is not * available on Google App Engine. * @param result the result */
Add metrics from ManagementFactory if possible. Note that ManagementFactory is not available on Google App Engine
addManagementMetrics
{ "repo_name": "srikalyan/spring-boot", "path": "spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/SystemPublicMetrics.java", "license": "apache-2.0", "size": 5551 }
[ "java.lang.management.ManagementFactory", "java.util.Collection", "org.springframework.boot.actuate.metrics.Metric" ]
import java.lang.management.ManagementFactory; import java.util.Collection; import org.springframework.boot.actuate.metrics.Metric;
import java.lang.management.*; import java.util.*; import org.springframework.boot.actuate.metrics.*;
[ "java.lang", "java.util", "org.springframework.boot" ]
java.lang; java.util; org.springframework.boot;
2,882,465
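The uptime and system-load readings in the SystemPublicMetrics record above come from plain JDK JMX beans; a minimal, Spring-free sketch that prints the same two values.

import java.lang.management.ManagementFactory;

class JvmMetricsSketch {
    public static void main(String[] args) {
        // JVM uptime in milliseconds since the runtime started
        long uptimeMs = ManagementFactory.getRuntimeMXBean().getUptime();
        // Load average over the last minute; -1.0 where unavailable (e.g. on Windows)
        double load = ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage();
        System.out.println("uptime=" + uptimeMs + "ms, systemload.average=" + load);
    }
}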
public final Collection<String> getAudience() { return audience; } /** * Verifies that the given ID token is valid using the cached public keys. * * <p>It verifies: * * <ul> * <li>The issuer is one of {@link #getIssuers()} by calling {@link * IdToken#verifyIssuer(String)}. * <li>The audience is one of {@link #getAudience()} by calling {@link * IdToken#verifyAudience(Collection)}. * <li>The current time against the issued at and expiration time, using the {@link #getClock()}
final Collection<String> function() { return audience; } /** * Verifies that the given ID token is valid using the cached public keys. * * <p>It verifies: * * <ul> * <li>The issuer is one of {@link #getIssuers()} by calling { * IdToken#verifyIssuer(String)}. * <li>The audience is one of {@link #getAudience()} by calling { * IdToken#verifyAudience(Collection)}. * <li>The current time against the issued at and expiration time, using the {@link #getClock()}
/** * Returns the unmodifiable list of trusted audience client IDs or {@code null} to suppress the * audience check. */
Returns the unmodifiable list of trusted audience client IDs or null to suppress the audience check
getAudience
{ "repo_name": "googleapis/google-oauth-java-client", "path": "google-oauth-client/src/main/java/com/google/api/client/auth/openidconnect/IdTokenVerifier.java", "license": "apache-2.0", "size": 9107 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
776,640
public String[] getTaskDiagnostics(TaskAttemptID taskid) throws IOException{ return new String [0]; }
String[] function(TaskAttemptID taskid) throws IOException{ return new String [0]; }
/** * Returns the diagnostic information for a particular task in the given job. * To be implemented */
Returns the diagnostic information for a particular task in the given job. To be implemented
getTaskDiagnostics
{ "repo_name": "IMCG/priter", "path": "src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java", "license": "apache-2.0", "size": 14168 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,238,998
@Test public void testSimplePutDelete() throws Exception { LOG.info("testSimplePutDelete"); Put put = new Put(row); put.add(famName, row, row); htable1 = new HTable(conf1, tableName); htable1.put(put); Get get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { if (i==NB_RETRIES-1) { fail("Waited too much time for put replication"); } Result res = htable2.get(get); if (res.size() == 0) { LOG.info("Row not available"); Thread.sleep(SLEEP_TIME); } else { assertArrayEquals(res.value(), row); break; } } Delete del = new Delete(row); htable1.delete(del); get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { if (i==NB_RETRIES-1) { fail("Waited too much time for del replication"); } Result res = htable2.get(get); if (res.size() >= 1) { LOG.info("Row not deleted"); Thread.sleep(SLEEP_TIME); } else { break; } } }
void function() throws Exception { LOG.info(STR); Put put = new Put(row); put.add(famName, row, row); htable1 = new HTable(conf1, tableName); htable1.put(put); Get get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { if (i==NB_RETRIES-1) { fail(STR); } Result res = htable2.get(get); if (res.size() == 0) { LOG.info(STR); Thread.sleep(SLEEP_TIME); } else { assertArrayEquals(res.value(), row); break; } } Delete del = new Delete(row); htable1.delete(del); get = new Get(row); for (int i = 0; i < NB_RETRIES; i++) { if (i==NB_RETRIES-1) { fail(STR); } Result res = htable2.get(get); if (res.size() >= 1) { LOG.info(STR); Thread.sleep(SLEEP_TIME); } else { break; } } }
/** * Add a row, check it's replicated, delete it, check it's gone * @throws Exception */
Add a row, check it's replicated, delete it, check it's gone
testSimplePutDelete
{ "repo_name": "simplegeo/hadoop-hbase", "path": "src/test/java/org/apache/hadoop/hbase/replication/TestReplication.java", "license": "apache-2.0", "size": 19302 }
[ "org.apache.hadoop.hbase.client.Delete", "org.apache.hadoop.hbase.client.Get", "org.apache.hadoop.hbase.client.HTable", "org.apache.hadoop.hbase.client.Put", "org.apache.hadoop.hbase.client.Result", "org.junit.Assert" ]
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.junit.Assert;
import org.apache.hadoop.hbase.client.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
2,020,750
protected Description attemptParseDescription(Node node) throws XPathException { String urn = (String) DOMUtil.compileXPathExpr("@rdf:about", nc).evaluate(node, XPathConstants.STRING); if (urn == null || urn.isEmpty()) { return null; } Description desc = new Description(urn); desc.setBroader(attemptParseRelations(node, "skos:broader")); desc.setNarrower(attemptParseRelations(node, "skos:narrower")); desc.setRelated(attemptParseRelations(node, "skos:related")); desc.setTopConcepts(attemptParseRelations(node, "skos:hasTopConcept")); return desc; }
Description function(Node node) throws XPathException { String urn = (String) DOMUtil.compileXPathExpr(STR, nc).evaluate(node, XPathConstants.STRING); if (urn == null || urn.isEmpty()) { return null; } Description desc = new Description(urn); desc.setBroader(attemptParseRelations(node, STR)); desc.setNarrower(attemptParseRelations(node, STR)); desc.setRelated(attemptParseRelations(node, STR)); desc.setTopConcepts(attemptParseRelations(node, STR)); return desc; }
/** * Given a rdf:Description node, parse it into a Description object. Unless the node defines all related Descriptions inline the resulting Description node * will be populated with related Descriptions that have the 'href' flag set. * * @throws XPathException */
Given a rdf:Description node, parse it into a Description object. Unless the node defines all related Descriptions inline the resulting Description node will be populated with related Descriptions that have the 'href' flag set
attemptParseDescription
{ "repo_name": "jia020/portal-core", "path": "src/main/java/org/auscope/portal/core/services/responses/vocab/DescriptionFactory.java", "license": "lgpl-3.0", "size": 11074 }
[ "javax.xml.xpath.XPathConstants", "javax.xml.xpath.XPathException", "org.auscope.portal.core.util.DOMUtil", "org.w3c.dom.Node" ]
import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathException; import org.auscope.portal.core.util.DOMUtil; import org.w3c.dom.Node;
import javax.xml.xpath.*; import org.auscope.portal.core.util.*; import org.w3c.dom.*;
[ "javax.xml", "org.auscope.portal", "org.w3c.dom" ]
javax.xml; org.auscope.portal; org.w3c.dom;
2,626,432
public static List<String> readProgramOutputStream (String path) throws IOException { path = checkNull("path can't be null", path); path = checkNotEmptyString("path must have a value", path); Process process = Runtime.getRuntime().exec(path); BufferedReader input = new BufferedReader(new InputStreamReader(process.getInputStream()) ); String line = null ; List<String> result = new ArrayList<String>(); while ( (line = input.readLine() ) != null ) { result.add( line ); } input.close(); return ( result ); }
static List<String> function (String path) throws IOException { path = checkNull(STR, path); path = checkNotEmptyString(STR, path); Process process = Runtime.getRuntime().exec(path); BufferedReader input = new BufferedReader(new InputStreamReader(process.getInputStream()) ); String line = null ; List<String> result = new ArrayList<String>(); while ( (line = input.readLine() ) != null ) { result.add( line ); } input.close(); return ( result ); }
/** * Execute a command line utility and read its output stream * @param path is the path for the command line utility * @return the result of the output stream as a list of strings * @throws IOException if the utility is not found * @throws NullPointerException if the path contains null data or an empty string */
Execute a command line utility and read its output stream
readProgramOutputStream
{ "repo_name": "CoEIA/DEM", "path": "src/edu/coeia/util/FileUtil.java", "license": "gpl-3.0", "size": 12345 }
[ "edu.coeia.util.PreconditionsChecker", "java.io.IOException" ]
import edu.coeia.util.PreconditionsChecker; import java.io.IOException;
import edu.coeia.util.*; import java.io.*;
[ "edu.coeia.util", "java.io" ]
edu.coeia.util; java.io;
2,207,703
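A self-contained sketch of the pattern in the readProgramOutputStream record above — launch a process and collect its output lines. ProcessBuilder and the sample command are assumptions made for the sketch; the original uses Runtime.getRuntime().exec(path) and its own precondition checks.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

class ReadProcessOutputSketch {
    public static void main(String[] args) throws IOException {
        // Start a command and collect each stdout line into a list.
        Process process = new ProcessBuilder("java", "-version")
                .redirectErrorStream(true) // "java -version" prints to stderr, so merge it
                .start();
        List<String> lines = new ArrayList<>();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                lines.add(line);
            }
        }
        lines.forEach(System.out::println);
    }
}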
public H2FeatureService getSchaAttr() { return schaAttr; }
H2FeatureService function() { return schaAttr; }
/** * DOCUMENT ME! * * @return the schaAttr */
DOCUMENT ME
getSchaAttr
{ "repo_name": "cismet/watergis-client", "path": "src/main/java/de/cismet/watergis/gui/actions/checks/BauwerkeCheckAction.java", "license": "lgpl-3.0", "size": 182528 }
[ "de.cismet.cismap.commons.featureservice.H2FeatureService" ]
import de.cismet.cismap.commons.featureservice.H2FeatureService;
import de.cismet.cismap.commons.featureservice.*;
[ "de.cismet.cismap" ]
de.cismet.cismap;
1,374,668
HdfsFileStatus getFileInfo(String src) throws IOException { if (isPermissionEnabled) { checkTraverse(src); } return dir.getFileInfo(src); }
HdfsFileStatus getFileInfo(String src) throws IOException { if (isPermissionEnabled) { checkTraverse(src); } return dir.getFileInfo(src); }
/** Get the file info for a specific file. * @param src The string representation of the path to the file * @throws IOException if permission to access file is denied by the system * @return object containing information regarding the file * or null if file not found */
Get the file info for a specific file
getFileInfo
{ "repo_name": "aseldawy/spatialhadoop", "path": "src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java", "license": "apache-2.0", "size": 220549 }
[ "java.io.IOException", "org.apache.hadoop.hdfs.protocol.HdfsFileStatus" ]
import java.io.IOException; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import java.io.*; import org.apache.hadoop.hdfs.protocol.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
398,652
public LinkedBindingBuilder<HttpRequestFilter> addFilterBinding() { return multibinder.addBinding(); }
LinkedBindingBuilder<HttpRequestFilter> function() { return multibinder.addBinding(); }
/** * See the EDSL examples at {@link HttpClientBinder}. */
See the EDSL examples at <code>HttpClientBinder</code>
addFilterBinding
{ "repo_name": "gwittel/platform", "path": "http-client/src/main/java/com/proofpoint/http/client/HttpClientBinder.java", "license": "apache-2.0", "size": 18330 }
[ "com.google.inject.binder.LinkedBindingBuilder" ]
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.binder.*;
[ "com.google.inject" ]
com.google.inject;
1,455,333
static String createSnapshot( FSDirectory fsd, FSPermissionChecker pc, SnapshotManager snapshotManager, String snapshotRoot, String snapshotName, boolean logRetryCache) throws IOException { final INodesInPath iip = fsd.resolvePath(pc, snapshotRoot, DirOp.WRITE); if (fsd.isPermissionEnabled()) { fsd.checkOwner(pc, iip); } if (snapshotName == null || snapshotName.isEmpty()) { snapshotName = Snapshot.generateDefaultSnapshotName(); } else if (!DFSUtil.isValidNameForComponent(snapshotName)) { throw new InvalidPathException("Invalid snapshot name: " + snapshotName); } String snapshotPath; verifySnapshotName(fsd, snapshotName, snapshotRoot); // time of snapshot creation final long now = Time.now(); fsd.writeLock(); try { snapshotPath = snapshotManager.createSnapshot( fsd.getFSNamesystem().getLeaseManager(), iip, snapshotRoot, snapshotName, now); } finally { fsd.writeUnlock(); } fsd.getEditLog().logCreateSnapshot(snapshotRoot, snapshotName, logRetryCache, now); return snapshotPath; }
static String createSnapshot( FSDirectory fsd, FSPermissionChecker pc, SnapshotManager snapshotManager, String snapshotRoot, String snapshotName, boolean logRetryCache) throws IOException { final INodesInPath iip = fsd.resolvePath(pc, snapshotRoot, DirOp.WRITE); if (fsd.isPermissionEnabled()) { fsd.checkOwner(pc, iip); } if (snapshotName == null || snapshotName.isEmpty()) { snapshotName = Snapshot.generateDefaultSnapshotName(); } else if (!DFSUtil.isValidNameForComponent(snapshotName)) { throw new InvalidPathException(STR + snapshotName); } String snapshotPath; verifySnapshotName(fsd, snapshotName, snapshotRoot); final long now = Time.now(); fsd.writeLock(); try { snapshotPath = snapshotManager.createSnapshot( fsd.getFSNamesystem().getLeaseManager(), iip, snapshotRoot, snapshotName, now); } finally { fsd.writeUnlock(); } fsd.getEditLog().logCreateSnapshot(snapshotRoot, snapshotName, logRetryCache, now); return snapshotPath; }
/** * Create a snapshot * @param fsd FS directory * @param pc FS permission checker * @param snapshotRoot The directory path where the snapshot is taken * @param snapshotName The name of the snapshot * @param logRetryCache whether to record RPC ids in editlog for retry cache * rebuilding. */
Create a snapshot
createSnapshot
{ "repo_name": "plusplusjiajia/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSnapshotOp.java", "license": "apache-2.0", "size": 12877 }
[ "java.io.IOException", "org.apache.hadoop.fs.InvalidPathException", "org.apache.hadoop.hdfs.DFSUtil", "org.apache.hadoop.hdfs.server.namenode.FSDirectory", "org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot", "org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotManager", "org.apache.hadoop.util.Time" ]
import java.io.IOException; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.server.namenode.FSDirectory; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotManager; import org.apache.hadoop.util.Time;
import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.hdfs.server.namenode.snapshot.*; import org.apache.hadoop.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,508,217
public void visitTabularLevel( TabularLevel obj ) { DesignVisitorImpl.this.visitTabularLevel( obj.handle( module ) ); }
void function( TabularLevel obj ) { DesignVisitorImpl.this.visitTabularLevel( obj.handle( module ) ); }
/** * Visits the level element. * * @param obj * the level element */
Visits the level element
visitTabularLevel
{ "repo_name": "rrimmana/birt-1", "path": "model/org.eclipse.birt.report.model/src/org/eclipse/birt/report/model/api/DesignVisitorImpl.java", "license": "epl-1.0", "size": 33442 }
[ "org.eclipse.birt.report.model.elements.olap.TabularLevel" ]
import org.eclipse.birt.report.model.elements.olap.TabularLevel;
import org.eclipse.birt.report.model.elements.olap.*;
[ "org.eclipse.birt" ]
org.eclipse.birt;
58,783
private Object[] buildEmptyRow() { return RowDataUtil.allocateRowData( data.outputRowMeta.size() ); }
Object[] function() { return RowDataUtil.allocateRowData( data.outputRowMeta.size() ); }
/** * Build an empty row based on the meta-data. * * @return empty row built */
Build an empty row based on the meta-data
buildEmptyRow
{ "repo_name": "emartin-pentaho/pentaho-kettle", "path": "plugins/xml/core/src/main/java/org/pentaho/di/trans/steps/getxmldata/GetXMLData.java", "license": "apache-2.0", "size": 37899 }
[ "org.pentaho.di.core.row.RowDataUtil" ]
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.*;
[ "org.pentaho.di" ]
org.pentaho.di;
91,651
@ApiModelProperty(example = "null", value = "") public String getAdditionalInformation() { return additionalInformation; }
@ApiModelProperty(example = "null", value = "") String function() { return additionalInformation; }
/** * Get additionalInformation * @return additionalInformation **/
Get additionalInformation
getAdditionalInformation
{ "repo_name": "Metatavu/kunta-api-spec", "path": "java-client-generated/src/main/java/fi/metatavu/kuntaapi/client/model/Phone.java", "license": "agpl-3.0", "size": 7181 }
[ "io.swagger.annotations.ApiModelProperty" ]
import io.swagger.annotations.ApiModelProperty;
import io.swagger.annotations.*;
[ "io.swagger.annotations" ]
io.swagger.annotations;
1,176,553
public void paintHighlighting(GL2 gl, Tree<ATreeMapNode> tree, SelectionManager selection) { gl.glNewList(highlightList, GL2.GL_COMPILE); // for (int id : selection.getElements(SelectionType.MOUSE_OVER)) { // ATreeMapNode node = tree.getNodeByNumber(id); // if (node != null) // paintRectangle(gl, node.getMinX(), node.getMinY(), node.getMaxX(), // node.getMaxY(), // SelectionType.MOUSE_OVER.getColor(),SELECTION_LINE_WIDTH); // } // // for (int id : selection.getElements(SelectionType.SELECTION)) { // ATreeMapNode node = tree.getNodeByNumber(id); // if (node != null) // paintRectangle(gl, node.getMinX(), node.getMinY(), node.getMaxX(), // node.getMaxY(), // SelectionType.SELECTION.getColor(),SELECTION_LINE_WIDTH); // } for (SelectionType type : selection.getSelectionTypes()) { if (type != SelectionType.NORMAL) for (int id : selection.getElements(type)) { ATreeMapNode node; // TODO remove work around for tree bug when fixed if (tree.getRoot().getID() == id) node = tree.getRoot(); else node = tree.getNodeByNumber(id); if (node != null) paintRectangle(gl, node.getMinX(), node.getMinY(), node.getMaxX(), node.getMaxY(), type.getColor(), type.getLineWidth()); } } gl.glEndList(); }
void function(GL2 gl, Tree<ATreeMapNode> tree, SelectionManager selection) { gl.glNewList(highlightList, GL2.GL_COMPILE); for (SelectionType type : selection.getSelectionTypes()) { if (type != SelectionType.NORMAL) for (int id : selection.getElements(type)) { ATreeMapNode node; if (tree.getRoot().getID() == id) node = tree.getRoot(); else node = tree.getNodeByNumber(id); if (node != null) paintRectangle(gl, node.getMinX(), node.getMinY(), node.getMaxX(), node.getMaxY(), type.getColor(), type.getLineWidth()); } } gl.glEndList(); }
/** * Renders only the highlighting to cache (not display). * * @param gl * @param tree * Treemap model. * @param selection * Selectionmanager */
Renders only the highlighting to cache (not display)
paintHighlighting
{ "repo_name": "Caleydo/caleydo", "path": "org.caleydo.view.treemap/src/org/caleydo/view/treemap/layout/TreeMapRenderer.java", "license": "bsd-3-clause", "size": 7672 }
[ "org.caleydo.core.data.graph.tree.Tree", "org.caleydo.core.data.selection.SelectionManager", "org.caleydo.core.data.selection.SelectionType" ]
import org.caleydo.core.data.graph.tree.Tree; import org.caleydo.core.data.selection.SelectionManager; import org.caleydo.core.data.selection.SelectionType;
import org.caleydo.core.data.graph.tree.*; import org.caleydo.core.data.selection.*;
[ "org.caleydo.core" ]
org.caleydo.core;
2,811,715
public void removeCommaSeparatedValue( DTCellValue52 dcv ) { if ( dcv == null ) { return; } if ( dcv.getStringValue() == null ) { return; } String[] values = dcv.getStringValue().split( "," ); if ( values.length > 0 ) { dcv.setStringValue( values[ 0 ] ); } }
void function( DTCellValue52 dcv ) { if ( dcv == null ) { return; } if ( dcv.getStringValue() == null ) { return; } String[] values = dcv.getStringValue().split( "," ); if ( values.length > 0 ) { dcv.setStringValue( values[ 0 ] ); } }
/** * Remove a comma-separated value, replacing the comma-separated value with the first in the comma-separated list * @param dcv */
Remove a comma-separated value, replacing the comma-separated value with the first in the comma-separated list
removeCommaSeparatedValue
{ "repo_name": "cristianonicolai/drools-wb", "path": "drools-wb-screens/drools-wb-guided-dtable-editor/drools-wb-guided-dtable-editor-client/src/main/java/org/drools/workbench/screens/guided/dtable/client/utils/DTCellValueUtilities.java", "license": "apache-2.0", "size": 17452 }
[ "org.drools.workbench.models.guided.dtable.shared.model.DTCellValue52" ]
import org.drools.workbench.models.guided.dtable.shared.model.DTCellValue52;
import org.drools.workbench.models.guided.dtable.shared.model.*;
[ "org.drools.workbench" ]
org.drools.workbench;
1,474,114
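A minimal illustration of the keep-the-first-value rule in the removeCommaSeparatedValue record above, operating on a plain String instead of a DTCellValue52; the class and method names are hypothetical.

class FirstValueSketch {
    // Keeps only the first entry of a comma-separated value, mirroring the rule above.
    static String firstCommaSeparatedValue(String value) {
        if (value == null) {
            return null;
        }
        String[] parts = value.split(",");
        return parts.length > 0 ? parts[0] : value;
    }
    // "red,green,blue" -> "red"; a single value or an empty string comes back unchanged.
}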
public static <T, R> List<R> mapNotNull( @This Iterable<T> thiz, Function<T, R> transform ) { return thiz.mapNotNullTo( new ArrayList<>(), transform ); }
static <T, R> List<R> function( @This Iterable<T> thiz, Function<T, R> transform ) { return thiz.mapNotNullTo( new ArrayList<>(), transform ); }
/** * Returns a list containing only the non-null results of applying the given {@code transform} function * to each element in the original collection. */
Returns a list containing only the non-null results of applying the given transform function to each element in the original collection
mapNotNull
{ "repo_name": "manifold-systems/manifold", "path": "manifold-deps-parent/manifold-collections/src/main/java/manifold/collections/extensions/java/lang/Iterable/ManIterableExt.java", "license": "apache-2.0", "size": 25260 }
[ "java.util.ArrayList", "java.util.List", "java.util.function.Function" ]
import java.util.ArrayList; import java.util.List; import java.util.function.Function;
import java.util.*; import java.util.function.*;
[ "java.util" ]
java.util;
767,051
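For readers without Manifold's @This extension mechanism, the mapNotNull record above can be approximated with the Stream API; this is an equivalent sketch, not the library's implementation (which collects via mapNotNullTo).

import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

class MapNotNullSketch {
    // Apply the transform, drop null results, collect to a list.
    static <T, R> List<R> mapNotNull(Iterable<T> source, Function<? super T, ? extends R> transform) {
        return StreamSupport.stream(source.spliterator(), false)
                .map(transform)
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }
}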