Example 76 with HashSet

Use of java.util.HashSet in the Apache Hadoop project.

From class TestStatsDMetrics, method testPutMetrics2:

@Test(timeout = 3000)
public void testPutMetrics2() throws IOException {
    StatsDSink sink = new StatsDSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Hostname, null));
    tags.add(new MetricsTag(MsInfo.Context, "jvm"));
    tags.add(new MetricsTag(MsInfo.ProcessName, "process"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1, MetricType.COUNTER));
    metrics.add(makeMetric("foo2", 2, MetricType.GAUGE));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    try (DatagramSocket sock = new DatagramSocket()) {
        sock.setReceiveBufferSize(8192);
        final StatsDSink.StatsD mockStatsD = new StatsD(sock.getLocalAddress().getHostName(), sock.getLocalPort());
        Whitebox.setInternalState(sink, "statsd", mockStatsD);
        final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
        sink.putMetrics(record);
        sock.receive(p);
        String result = new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
        assertTrue("Received data did not match data sent", result.equals("process.jvm.Context.foo1:1|c") || result.equals("process.jvm.Context.foo2:2|g"));
    } finally {
        sink.close();
    }
}
Also used: StatsDSink (org.apache.hadoop.metrics2.sink.StatsDSink), StatsD (org.apache.hadoop.metrics2.sink.StatsDSink.StatsD), MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), MetricsTag (org.apache.hadoop.metrics2.MetricsTag), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), ArrayList (java.util.ArrayList), HashSet (java.util.HashSet), DatagramSocket (java.net.DatagramSocket), DatagramPacket (java.net.DatagramPacket), Test (org.junit.Test)
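The assertion above checks the StatsD wire format, where each datagram line is name:value|type (|c for a counter, |g for a gauge). The makeMetric helper is not part of this excerpt; a minimal sketch of what it plausibly does, assuming a Mockito mock of AbstractMetric (the mock-based approach is an assumption, not confirmed by the excerpt):

// Hypothetical reconstruction of the makeMetric helper referenced above.
// Static imports assumed: org.mockito.Mockito.mock, org.mockito.Mockito.when.
private AbstractMetric makeMetric(String name, Number value, MetricType type) {
    AbstractMetric metric = mock(AbstractMetric.class);
    when(metric.name()).thenReturn(name);   // metric name, e.g. "foo1"
    when(metric.value()).thenReturn(value); // numeric value sent over the wire
    when(metric.type()).thenReturn(type);   // COUNTER or GAUGE
    return metric;
}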

Example 77 with HashSet

Use of java.util.HashSet in the Apache Hadoop project.

From class TestGraphiteMetrics, method testPutMetrics:

@Test
public void testPutMetrics() {
    GraphiteSink sink = new GraphiteSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Context, "all"));
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1.25));
    metrics.add(makeMetric("foo2", 2.25));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
    final GraphiteSink.Graphite mockGraphite = makeGraphite();
    Whitebox.setInternalState(sink, "graphite", mockGraphite);
    sink.putMetrics(record);
    try {
        verify(mockGraphite).write(argument.capture());
    } catch (IOException e) {
        // Fail the test instead of silently swallowing the exception.
        fail("Unexpected IOException: " + e.getMessage());
    }
    String result = argument.getValue();
    // HashSet iteration order is unspecified, so either metric may be written first.
    assertTrue(result.equals(
        "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"
            + "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n")
        || result.equals(
        "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n"
            + "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"));
}
Also used: MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), MetricsTag (org.apache.hadoop.metrics2.MetricsTag), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), GraphiteSink (org.apache.hadoop.metrics2.sink.GraphiteSink), ArrayList (java.util.ArrayList), HashSet (java.util.HashSet), IOException (java.io.IOException), Matchers.anyString (org.mockito.Matchers.anyString), Test (org.junit.Test)
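The makeGraphite helper (used here and in the next example) is also outside this excerpt; a plausible sketch, assuming it returns a Mockito mock of GraphiteSink.Graphite that reports itself as connected (an assumption, not shown in the source):

// Hypothetical reconstruction of the makeGraphite helper.
// Static imports assumed: org.mockito.Mockito.mock, org.mockito.Mockito.when.
private GraphiteSink.Graphite makeGraphite() {
    GraphiteSink.Graphite mockGraphite = mock(GraphiteSink.Graphite.class);
    // Pretend the connection is established so putMetrics writes directly.
    when(mockGraphite.isConnected()).thenReturn(true);
    return mockGraphite;
}

Each line written to Graphite follows the plaintext protocol, "path value timestamp\n"; the timestamp 10 in the assertion is the record's 10000 ms timestamp divided by 1000.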

Example 78 with HashSet

Use of java.util.HashSet in the Apache Hadoop project.

From class TestGraphiteMetrics, method testFailureAndPutMetrics:

@Test
public void testFailureAndPutMetrics() throws IOException {
    GraphiteSink sink = new GraphiteSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Context, "all"));
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1.25));
    metrics.add(makeMetric("foo2", 2.25));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    final GraphiteSink.Graphite mockGraphite = makeGraphite();
    Whitebox.setInternalState(sink, "graphite", mockGraphite);
    // Make the first write attempt throw an IOException.
    doThrow(new IOException("IO exception")).when(mockGraphite).write(anyString());
    sink.putMetrics(record);
    verify(mockGraphite).write(anyString());
    verify(mockGraphite).close();
    // Reset the mock; stubbing isConnected() to false forces the sink to reconnect.
    reset(mockGraphite);
    when(mockGraphite.isConnected()).thenReturn(false);
    ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
    sink.putMetrics(record);
    verify(mockGraphite).write(argument.capture());
    String result = argument.getValue();
    assertTrue(result.equals(
        "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"
            + "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n")
        || result.equals(
        "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n"
            + "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"));
}
Also used: MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), MetricsTag (org.apache.hadoop.metrics2.MetricsTag), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), GraphiteSink (org.apache.hadoop.metrics2.sink.GraphiteSink), ArrayList (java.util.ArrayList), HashSet (java.util.HashSet), IOException (java.io.IOException), Matchers.anyString (org.mockito.Matchers.anyString), Test (org.junit.Test)
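Two Mockito idioms carry this test: doThrow(...).when(mock).method(...) to fault the first write, and reset(mock) to clear both stubbing and recorded invocations before the retry. A self-contained sketch of the same idiom against a Sender interface invented here purely for illustration:

// Static imports assumed: org.mockito.Mockito.*, org.junit.Assert.fail.
interface Sender {
    void send(String message) throws IOException;
    boolean isConnected();
}

@Test
public void throwThenRecoverIdiom() throws IOException {
    Sender sender = mock(Sender.class);
    // Fault the first call.
    doThrow(new IOException("simulated failure")).when(sender).send(anyString());
    try {
        sender.send("payload");
        fail("expected an IOException");
    } catch (IOException expected) {
        // Code under test would close the connection and mark it dead here.
    }
    // reset() wipes stubbing and invocation history for a clean retry.
    reset(sender);
    when(sender.isConnected()).thenReturn(false); // drives a reconnect path
    sender.send("payload"); // default mock behavior: no-op, no exception
    verify(sender).send("payload");
}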

Example 79 with HashSet

Use of java.util.HashSet in the Apache Hadoop project.

From class TestMiniKdc, method testKerberosLogin:

@Test
public void testKerberosLogin() throws Exception {
    MiniKdc kdc = getKdc();
    File workDir = getWorkDir();
    LoginContext loginContext = null;
    try {
        String principal = "foo";
        File keytab = new File(workDir, "foo.keytab");
        kdc.createPrincipal(keytab, principal);
        Set<Principal> principals = new HashSet<Principal>();
        principals.add(new KerberosPrincipal(principal));
    // client login
        Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
        loginContext = new LoginContext("", subject, null, KerberosConfiguration.createClientConfig(principal, keytab));
        loginContext.login();
        subject = loginContext.getSubject();
        Assert.assertEquals(1, subject.getPrincipals().size());
        Assert.assertEquals(KerberosPrincipal.class, subject.getPrincipals().iterator().next().getClass());
        Assert.assertEquals(principal + "@" + kdc.getRealm(), subject.getPrincipals().iterator().next().getName());
        loginContext.logout();
        // server login
        subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
        loginContext = new LoginContext("", subject, null, KerberosConfiguration.createServerConfig(principal, keytab));
        loginContext.login();
        subject = loginContext.getSubject();
        Assert.assertEquals(1, subject.getPrincipals().size());
        Assert.assertEquals(KerberosPrincipal.class, subject.getPrincipals().iterator().next().getClass());
        Assert.assertEquals(principal + "@" + kdc.getRealm(), subject.getPrincipals().iterator().next().getName());
        loginContext.logout();
    } finally {
        if (loginContext != null) {
            loginContext.logout();
        }
    }
}
Also used: KerberosPrincipal (javax.security.auth.kerberos.KerberosPrincipal), Principal (java.security.Principal), Subject (javax.security.auth.Subject), LoginContext (javax.security.auth.login.LoginContext), File (java.io.File), HashSet (java.util.HashSet), Test (org.junit.Test)
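KerberosConfiguration.createClientConfig and createServerConfig are Hadoop test helpers not shown in this excerpt. Conceptually they return a JAAS Configuration that drives Krb5LoginModule from the generated keytab; a minimal sketch of such a configuration (the exact option set is an assumption, and the real helpers may differ):

// Hypothetical keytab-based JAAS configuration, similar in spirit to
// KerberosConfiguration.createClientConfig; the options below are assumptions.
// Imports assumed: javax.security.auth.login.Configuration,
// javax.security.auth.login.AppConfigurationEntry, java.io.File,
// java.util.HashMap, java.util.Map.
static Configuration keytabConfig(final String principal, final File keytab) {
    return new Configuration() {
        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            Map<String, String> options = new HashMap<String, String>();
            options.put("principal", principal);
            options.put("useKeyTab", "true");
            options.put("keyTab", keytab.getAbsolutePath());
            options.put("storeKey", "true");    // retain the key for server-side use
            options.put("doNotPrompt", "true"); // never fall back to a password prompt
            return new AppConfigurationEntry[] {
                new AppConfigurationEntry(
                    "com.sun.security.auth.module.Krb5LoginModule",
                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                    options)
            };
        }
    };
}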

Example 80 with HashSet

Use of java.util.HashSet in the Apache Hadoop project.

From class DatasetVolumeChecker, method checkVolume:

/**
   * Check a single volume asynchronously, returning a {@link ListenableFuture}
   * that can be used to retrieve the final result.
   *
   * If the volume cannot be referenced then it is already closed and
   * cannot be checked. No error is propagated to the callback.
   *
   * @param volume the volume that is to be checked.
   * @param callback callback to be invoked when the volume check completes.
   * @return true if the check was scheduled and the callback will be invoked.
   *         false otherwise.
   */
public boolean checkVolume(final FsVolumeSpi volume, Callback callback) {
    if (volume == null) {
        LOG.debug("Cannot schedule check on null volume");
        return false;
    }
    FsVolumeReference volumeReference;
    try {
        volumeReference = volume.obtainReference();
    } catch (ClosedChannelException e) {
        // The volume has already been closed.
        return false;
    }
    Optional<ListenableFuture<VolumeCheckResult>> olf = delegateChecker.schedule(volume, IGNORED_CONTEXT);
    if (olf.isPresent()) {
        numVolumeChecks.incrementAndGet();
        Futures.addCallback(olf.get(), new ResultHandler(volumeReference, new HashSet<>(), new HashSet<>(), new AtomicLong(1), callback));
        return true;
    } else {
        IOUtils.cleanup(null, volumeReference);
    }
    return false;
}
Also used: ClosedChannelException (java.nio.channels.ClosedChannelException), AtomicLong (java.util.concurrent.atomic.AtomicLong), FsVolumeReference (org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference), ListenableFuture (com.google.common.util.concurrent.ListenableFuture), HashSet (java.util.HashSet)
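A sketch of how a caller might consume checkVolume, assuming DatasetVolumeChecker.Callback is a functional interface that receives the sets of healthy and failed volumes (the Callback signature is an assumption, not shown in this excerpt):

// Hypothetical caller; the Callback shape (healthy/failed volume sets) is assumed.
boolean scheduled = checker.checkVolume(volume, (healthyVolumes, failedVolumes) -> {
    if (!failedVolumes.isEmpty()) {
        LOG.warn("Disk check flagged failed volumes: {}", failedVolumes);
    }
});
if (!scheduled) {
    // The volume was null or already closed; no callback will ever fire.
    LOG.debug("No check scheduled for {}", volume);
}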

Aggregations

HashSet (java.util.HashSet): 12137
Set (java.util.Set): 2609
ArrayList (java.util.ArrayList): 2318
HashMap (java.util.HashMap): 2096
Test (org.junit.Test): 2060
Map (java.util.Map): 1198
Iterator (java.util.Iterator): 979
IOException (java.io.IOException): 934
List (java.util.List): 911
File (java.io.File): 607
LinkedHashSet (java.util.LinkedHashSet): 460
Test (org.testng.annotations.Test): 460
TreeSet (java.util.TreeSet): 271
Collection (java.util.Collection): 233
LinkedList (java.util.LinkedList): 224
Region (org.apache.geode.cache.Region): 202
SSOException (com.iplanet.sso.SSOException): 188
Date (java.util.Date): 180
LinkedHashMap (java.util.LinkedHashMap): 169
PartitionedRegion (org.apache.geode.internal.cache.PartitionedRegion): 166