Use of java.util.HashSet in project groovy by apache.
The class ProxyGeneratorAdapter, method createDelegateMethodList:
private static Set<String> createDelegateMethodList(Class superClass, Class delegateClass, Class[] interfaces) {
    Set<String> selectedMethods = new HashSet<String>();
    List<Method> interfaceMethods = new ArrayList<Method>();
    List<Method> superClassMethods = new ArrayList<Method>();
    Collections.addAll(superClassMethods, superClass.getDeclaredMethods());
    if (interfaces != null) {
        for (Class thisInterface : interfaces) {
            getInheritedMethods(thisInterface, interfaceMethods);
        }
        // keep interface methods that the superclass does not already provide
        for (Method method : interfaceMethods) {
            if (!(containsEquivalentMethod(superClassMethods, method))) {
                selectedMethods.add(method.getName() + Type.getMethodDescriptor(method));
            }
        }
    }
    List<Method> additionalMethods = getInheritedMethods(delegateClass, new ArrayList<Method>());
    for (Method method : additionalMethods) {
        // skip synthetic methods whose names contain '$'
        if (method.getName().indexOf('$') != -1)
            continue;
        if (!containsEquivalentMethod(interfaceMethods, method)
                && !containsEquivalentMethod(OBJECT_METHODS, method)
                && !containsEquivalentMethod(GROOVYOBJECT_METHODS, method)) {
            selectedMethods.add(method.getName() + Type.getMethodDescriptor(method));
        }
    }
    return selectedMethods;
}
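The HashSet here deduplicates proxy method signatures: each key is the method name concatenated with its ASM type descriptor, so equivalent methods collected from several interfaces collapse to a single entry. Below is a minimal standalone sketch of the same idiom; the class name and the key() helper are illustrative inventions, not Groovy or ASM APIs.

import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;

// Hypothetical demo (not part of Groovy): deduplicate method signatures
// with a HashSet keyed on "name + parameter types".
public class UniqueSignatures {

    // Build a key similar in spirit to name + method descriptor.
    static String key(Method m) {
        StringBuilder sb = new StringBuilder(m.getName()).append('(');
        for (Class<?> p : m.getParameterTypes()) {
            sb.append(p.getName()).append(';');
        }
        return sb.append(')').toString();
    }

    public static void main(String[] args) {
        Set<String> selected = new HashSet<String>();
        for (Method m : java.util.ArrayList.class.getMethods()) {
            // HashSet.add is a no-op for duplicates, so each signature is kept once
            selected.add(key(m));
        }
        System.out.println(selected.size() + " distinct signatures");
    }
}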
Use of java.util.HashSet in project groovy by apache.
The class MethodRankHelper, method getConflictClasses:
private static List<Pair<Class, Class>> getConflictClasses(List<MetaMethod> sugg, Class[] argumentClasses) {
    List<Pair<Class, Class>> ret = new LinkedList<Pair<Class, Class>>();
    Set<Class> recordedClasses = new HashSet<Class>();
    for (MetaMethod method : sugg) {
        Class[] para = method.getNativeParameterTypes();
        for (Class aPara : para) {
            // process each parameter class only once
            if (recordedClasses.contains(aPara))
                continue;
            for (Class argumentClass : argumentClasses) {
                if (argumentClass == null)
                    continue;
                if (argumentClass == aPara)
                    continue;
                // same name but different Class objects: a class-loader conflict
                if (argumentClass.getName().equals(aPara.getName())) {
                    ret.add(new Pair<Class, Class>(argumentClass, aPara));
                }
            }
            recordedClasses.add(aPara);
        }
    }
    return ret;
}
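recordedClasses is a classic "seen" set: each parameter class is compared against the argument classes once, then recorded so any later occurrence is skipped with an O(1) lookup. A minimal standalone sketch of that pattern follows; the class name and data are made up for illustration.

import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

// Hypothetical demo (not Groovy code): skip items already processed by
// recording them in a HashSet, mirroring the recordedClasses pattern above.
public class SeenSetDemo {
    public static void main(String[] args) {
        String[] items = { "a", "b", "a", "c", "b" };
        Set<String> seen = new HashSet<String>();
        List<String> processed = new LinkedList<String>();
        for (String item : items) {
            if (seen.contains(item))
                continue; // already handled, skip the per-item work
            processed.add(item.toUpperCase()); // stand-in for the real work
            seen.add(item);
        }
        System.out.println(processed); // prints [A, B, C]
    }
}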
Use of java.util.HashSet in project groovy by apache.
The class TraitASTTransformation, method createHelperClass:
private ClassNode createHelperClass(final ClassNode cNode) {
    ClassNode helper = new InnerClassNode(cNode, Traits.helperClassName(cNode),
            ACC_PUBLIC | ACC_STATIC | ACC_ABSTRACT | ACC_SYNTHETIC,
            ClassHelper.OBJECT_TYPE, ClassNode.EMPTY_ARRAY, null);
    cNode.setModifiers(ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT);
    checkInnerClasses(cNode);
    MethodNode initializer = createInitMethod(false, cNode, helper);
    MethodNode staticInitializer = createInitMethod(true, cNode, helper);
    // apply the verifier to have the property nodes generated
    generatePropertyMethods(cNode);
    // prepare fields
    List<FieldNode> fields = new ArrayList<FieldNode>();
    Set<String> fieldNames = new HashSet<String>();
    for (FieldNode field : cNode.getFields()) {
        if (!"metaClass".equals(field.getName()) && (!field.isSynthetic() || field.getName().indexOf('$') < 0)) {
            fields.add(field);
            fieldNames.add(field.getName());
        }
    }
    ClassNode fieldHelper = null;
    if (!fields.isEmpty()) {
        fieldHelper = new InnerClassNode(cNode, Traits.fieldHelperClassName(cNode),
                ACC_STATIC | ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT,
                ClassHelper.OBJECT_TYPE);
    }
    // add methods
    List<MethodNode> methods = new ArrayList<MethodNode>(cNode.getMethods());
    List<MethodNode> nonPublicAPIMethods = new LinkedList<MethodNode>();
    for (final MethodNode methodNode : methods) {
        boolean declared = methodNode.getDeclaringClass() == cNode;
        if (declared) {
            if (!methodNode.isSynthetic() && (methodNode.isProtected() || methodNode.getModifiers() == 0)) {
                unit.addError(new SyntaxException("Cannot have protected/package private method in a trait ("
                        + cNode.getName() + "#" + methodNode.getTypeDescriptor() + ")",
                        methodNode.getLineNumber(), methodNode.getColumnNumber()));
                return null;
            }
            helper.addMethod(processMethod(cNode, helper, methodNode, fieldHelper, fieldNames));
            if (methodNode.isPrivate() || methodNode.isStatic()) {
                nonPublicAPIMethods.add(methodNode);
            }
        }
    }
    // remove methods which should not appear in the trait interface
    for (MethodNode privateMethod : nonPublicAPIMethods) {
        cNode.removeMethod(privateMethod);
    }
    // add fields
    for (FieldNode field : fields) {
        processField(field, initializer, staticInitializer, fieldHelper, helper, cNode, fieldNames);
    }
    // clear properties to avoid generation of methods
    cNode.getProperties().clear();
    // copy annotations
    copyClassAnnotations(cNode, helper);
    // reuse the full list of fields
    fields = new ArrayList<FieldNode>(cNode.getFields());
    for (FieldNode field : fields) {
        cNode.removeField(field.getName());
    }
    // visit AST xforms
    registerASTTranformations(helper);
    unit.getAST().addClass(helper);
    if (fieldHelper != null) {
        unit.getAST().addClass(fieldHelper);
    }
    // resolve scope (for closures)
    resolveScope(helper);
    if (fieldHelper != null) {
        resolveScope(fieldHelper);
    }
    return helper;
}
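fieldNames is filled once and then handed to processMethod and processField, so later passes can test field membership with an O(1) contains() check instead of rescanning the field list. A small self-contained sketch of that lookup pattern; the class name and data below are hypothetical and not part of the Groovy transform.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical demo (not Groovy code): gather names into a HashSet once,
// then use O(1) contains() checks in a later processing pass.
public class FieldNameLookup {
    public static void main(String[] args) {
        List<String> declaredFields = new ArrayList<String>();
        declaredFields.add("name");
        declaredFields.add("age");
        declaredFields.add("metaClass"); // filtered out, as in the real transform

        Set<String> fieldNames = new HashSet<String>();
        for (String f : declaredFields) {
            if (!"metaClass".equals(f)) {
                fieldNames.add(f);
            }
        }

        // later pass: act only on names known to be trait fields
        String[] referenced = { "name", "toString", "age" };
        for (String ref : referenced) {
            System.out.println(ref + " is trait field: " + fieldNames.contains(ref));
        }
    }
}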
Use of java.util.HashSet in project hadoop by apache.
The class TestTrash, method testTrashEmptier:
public void testTrashEmptier() throws Exception {
    Configuration conf = new Configuration();
    // Trash with 12-second deletes and 6-second checkpoints
    // (both interval keys are expressed in minutes)
    conf.set(FS_TRASH_INTERVAL_KEY, "0.2"); // 0.2 min = 12 seconds
    conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
    conf.set(FS_TRASH_CHECKPOINT_INTERVAL_KEY, "0.1"); // 0.1 min = 6 seconds
    FileSystem fs = FileSystem.getLocal(conf);
    conf.set("fs.default.name", fs.getUri().toString());
    Trash trash = new Trash(conf);
    // Start Emptier in background
    Runnable emptier = trash.getEmptier();
    Thread emptierThread = new Thread(emptier);
    emptierThread.start();
    FsShell shell = new FsShell();
    shell.setConf(conf);
    shell.init();
    // First create a new directory with mkdirs
    Path myPath = new Path(TEST_DIR, "test/mkdirs");
    mkdir(fs, myPath);
    int fileIndex = 0;
    Set<String> checkpoints = new HashSet<String>();
    while (true) {
        // Create a file with a new name
        Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);
        writeFile(fs, myFile, 10);
        // Delete the file to trash
        String[] args = new String[2];
        args[0] = "-rm";
        args[1] = myFile.toString();
        int val = -1;
        try {
            val = shell.run(args);
        } catch (Exception e) {
            System.err.println("Exception raised from Trash.run " + e.getLocalizedMessage());
        }
        assertTrue(val == 0);
        Path trashDir = shell.getCurrentTrashDir();
        FileStatus[] files = fs.listStatus(trashDir.getParent());
        // Scan files in .Trash and add their names to the set of checkpoints
        for (FileStatus file : files) {
            String fileName = file.getPath().getName();
            checkpoints.add(fileName);
        }
        // When checkpoints has 4 entries, it has seen Current plus 3 checkpoint directories
        if (checkpoints.size() == 4) {
            // The actual contents should be smaller, since the last checkpoint
            // should've been deleted and Current might not have been recreated yet
            assertTrue(checkpoints.size() > files.length);
            break;
        }
        Thread.sleep(5000);
    }
    emptierThread.interrupt();
    emptierThread.join();
}
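checkpoints accumulates directory names across polling iterations; because HashSet ignores duplicate adds, re-listing the same .Trash contents never inflates the count, and the loop exits only once four distinct names have been observed. A minimal sketch of that accumulation behavior, using invented directory names rather than real Hadoop output:

import java.util.HashSet;
import java.util.Set;

// Hypothetical demo (not Hadoop code): accumulate names seen across repeated
// scans in a HashSet; duplicates from re-scanning do not grow the set.
public class CheckpointAccumulator {
    public static void main(String[] args) {
        String[][] scans = {
            { "Current" },
            { "Current", "181225120000" },
            { "181225120000", "181225120600" }, // Current not yet recreated
        };
        Set<String> checkpoints = new HashSet<String>();
        for (String[] scan : scans) {
            for (String name : scan) {
                checkpoints.add(name); // re-adding an existing name is a no-op
            }
            System.out.println("distinct names so far: " + checkpoints.size());
        }
    }
}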
Use of java.util.HashSet in project hadoop by apache.
The class TestStatsDMetrics, method testPutMetrics:
@Test(timeout = 3000)
public void testPutMetrics() throws IOException, InterruptedException {
    final StatsDSink sink = new StatsDSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    tags.add(new MetricsTag(MsInfo.Context, "jvm"));
    tags.add(new MetricsTag(MsInfo.ProcessName, "process"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1.25, MetricType.COUNTER));
    metrics.add(makeMetric("foo2", 2.25, MetricType.GAUGE));
    final MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    try (DatagramSocket sock = new DatagramSocket()) {
        sock.setReceiveBufferSize(8192);
        final StatsDSink.StatsD mockStatsD = new StatsD(sock.getLocalAddress().getHostName(), sock.getLocalPort());
        Whitebox.setInternalState(sink, "statsd", mockStatsD);
        final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
        sink.putMetrics(record);
        sock.receive(p);
        String result = new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
        assertTrue("Received data did not match data sent",
                result.equals("host.process.jvm.Context.foo1:1.25|c")
                || result.equals("host.process.jvm.Context.foo2:2.25|g"));
    } finally {
        sink.close();
    }
}
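Because the metrics are held in a HashSet, their iteration order is unspecified, which is why the assertion must accept either datagram as the first one received. A short standalone sketch of why consumers cannot rely on which element a HashSet yields first:

import java.util.HashSet;
import java.util.Set;

// Hypothetical demo (not Hadoop code): HashSet iteration order is unspecified,
// so consumers must not assume which element they will see first.
public class UnorderedSetDemo {
    public static void main(String[] args) {
        Set<String> metrics = new HashSet<String>();
        metrics.add("foo1:1.25|c");
        metrics.add("foo2:2.25|g");
        String first = metrics.iterator().next();
        // Either element may come out first; assert against both possibilities,
        // just as the test above does with its two expected datagrams.
        boolean ok = first.equals("foo1:1.25|c") || first.equals("foo2:2.25|g");
        System.out.println("first element acceptable: " + ok);
    }
}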