Search in sources:

Example 46 with EnumMap

Use of java.util.EnumMap in the Apache Cassandra project.

The class CloudstackSnitchTest, method testRacks.

@Test
public void testRacks() throws IOException, ConfigurationException {
    az = "ch-gva-1";
    // Snitch under test plus one local and one remote endpoint.
    CloudstackSnitch snitch = new TestCloudstackSnitch();
    InetAddress localEndpoint = InetAddress.getByName("127.0.0.1");
    InetAddress remoteEndpoint = InetAddress.getByName("127.0.0.7");

    // Register the remote endpoint and publish its DC/rack through gossip state.
    Gossiper.instance.addSavedEndpoint(remoteEndpoint);
    Map<ApplicationState, VersionedValue> gossipState = new EnumMap<>(ApplicationState.class);
    gossipState.put(ApplicationState.DC, StorageService.instance.valueFactory.datacenter("ch-zrh"));
    gossipState.put(ApplicationState.RACK, StorageService.instance.valueFactory.rack("2"));
    Gossiper.instance.getEndpointStateForEndpoint(remoteEndpoint).addApplicationStates(gossipState);

    // Remote endpoint resolves from gossip; local endpoint resolves from the
    // availability zone string ("ch-gva-1" -> DC "ch-gva", rack "1").
    assertEquals("ch-zrh", snitch.getDatacenter(remoteEndpoint));
    assertEquals("2", snitch.getRack(remoteEndpoint));
    assertEquals("ch-gva", snitch.getDatacenter(localEndpoint));
    assertEquals("1", snitch.getRack(localEndpoint));
}
Also used : VersionedValue(org.apache.cassandra.gms.VersionedValue) ApplicationState(org.apache.cassandra.gms.ApplicationState) InetAddress(java.net.InetAddress) EnumMap(java.util.EnumMap) Test(org.junit.Test)

Example 47 with EnumMap

Use of java.util.EnumMap in the Apache Cassandra project.

The class EC2SnitchTest, method testRac.

@Test
public void testRac() throws IOException, ConfigurationException {
    az = "us-east-1d";
    // Snitch under test plus one local and one remote endpoint.
    Ec2Snitch snitch = new TestEC2Snitch();
    InetAddress local = InetAddress.getByName("127.0.0.1");
    InetAddress nonlocal = InetAddress.getByName("127.0.0.7");

    // Register the remote endpoint and publish its DC/rack through gossip state.
    Gossiper.instance.addSavedEndpoint(nonlocal);
    Map<ApplicationState, VersionedValue> stateMap = new EnumMap<>(ApplicationState.class);
    stateMap.put(ApplicationState.DC, StorageService.instance.valueFactory.datacenter("us-west"));
    // FIX: the RACK state must be built with valueFactory.rack(...), not
    // valueFactory.datacenter(...) — copy-paste slip; cf. the Cloudstack test.
    stateMap.put(ApplicationState.RACK, StorageService.instance.valueFactory.rack("1a"));
    Gossiper.instance.getEndpointStateForEndpoint(nonlocal).addApplicationStates(stateMap);

    // Remote endpoint resolves from gossip; local endpoint resolves from the
    // availability zone string ("us-east-1d" -> DC "us-east", rack "1d").
    assertEquals("us-west", snitch.getDatacenter(nonlocal));
    assertEquals("1a", snitch.getRack(nonlocal));
    assertEquals("us-east", snitch.getDatacenter(local));
    assertEquals("1d", snitch.getRack(local));
}
Also used : VersionedValue(org.apache.cassandra.gms.VersionedValue) ApplicationState(org.apache.cassandra.gms.ApplicationState) InetAddress(java.net.InetAddress) EnumMap(java.util.EnumMap) Test(org.junit.Test)

Example 48 with EnumMap

Use of java.util.EnumMap in the Apache Cassandra project.

The class GoogleCloudSnitchTest, method testRac.

@Test
public void testRac() throws IOException, ConfigurationException {
    az = "us-central1-a";
    // Snitch under test plus one local and one remote endpoint.
    GoogleCloudSnitch snitch = new TestGoogleCloudSnitch();
    InetAddress local = InetAddress.getByName("127.0.0.1");
    InetAddress nonlocal = InetAddress.getByName("127.0.0.7");

    // Register the remote endpoint and publish its DC/rack through gossip state.
    Gossiper.instance.addSavedEndpoint(nonlocal);
    Map<ApplicationState, VersionedValue> stateMap = new EnumMap<>(ApplicationState.class);
    stateMap.put(ApplicationState.DC, StorageService.instance.valueFactory.datacenter("europe-west1"));
    // FIX: the RACK state must be built with valueFactory.rack(...), not
    // valueFactory.datacenter(...) — copy-paste slip; cf. the Cloudstack test.
    stateMap.put(ApplicationState.RACK, StorageService.instance.valueFactory.rack("a"));
    Gossiper.instance.getEndpointStateForEndpoint(nonlocal).addApplicationStates(stateMap);

    // Remote endpoint resolves from gossip; local endpoint resolves from the
    // availability zone string ("us-central1-a" -> DC "us-central1", rack "a").
    assertEquals("europe-west1", snitch.getDatacenter(nonlocal));
    assertEquals("a", snitch.getRack(nonlocal));
    assertEquals("us-central1", snitch.getDatacenter(local));
    assertEquals("a", snitch.getRack(local));
}
Also used : VersionedValue(org.apache.cassandra.gms.VersionedValue) ApplicationState(org.apache.cassandra.gms.ApplicationState) InetAddress(java.net.InetAddress) EnumMap(java.util.EnumMap) Test(org.junit.Test)

Example 49 with EnumMap

Use of java.util.EnumMap in the Apache Hadoop project.

The class TestDFSNetworkTopology, method testAddAndRemoveTopology.

/**
   * Test the correctness of storage type info when nodes are added and removed.
   * @throws Exception
   */
@Test
public void testAddAndRemoveTopology() throws Exception {
    // Add four new datanodes (one DISK, three SSD) under /l1/d1 and /l1/d3.
    String[] newRack = { "/l1/d1/r1", "/l1/d1/r3", "/l1/d3/r3", "/l1/d3/r3" };
    String[] newHost = { "nhost1", "nhost2", "nhost3", "nhost4" };
    String[] newips = { "30.30.30.30", "31.31.31.31", "32.32.32.32", "33.33.33.33" };
    StorageType[] newTypes = { StorageType.DISK, StorageType.SSD, StorageType.SSD, StorageType.SSD };
    DatanodeDescriptor[] newDD = new DatanodeDescriptor[4];
    for (int i = 0; i < 4; i++) {
        DatanodeStorageInfo dsi = DFSTestUtil.createDatanodeStorageInfo("s" + newHost[i], newips[i], newRack[i], newHost[i], newTypes[i], null);
        newDD[i] = dsi.getDatanodeDescriptor();
        CLUSTER.add(newDD[i]);
    }
    // Verify the per-rack storage-type counts of /l1/d1 after the additions.
    DFSTopologyNodeImpl d1 = (DFSTopologyNodeImpl) CLUSTER.getNode("/l1/d1");
    HashMap<String, EnumMap<StorageType, Integer>> d1info = d1.getChildrenStorageInfo();
    assertEquals(3, d1info.keySet().size());
    assertTrue(d1info.get("r1").size() == 2 && d1info.get("r2").size() == 2 && d1info.get("r3").size() == 1);
    assertEquals(2, (int) d1info.get("r1").get(StorageType.DISK));
    assertEquals(1, (int) d1info.get("r1").get(StorageType.ARCHIVE));
    assertEquals(2, (int) d1info.get("r2").get(StorageType.DISK));
    assertEquals(1, (int) d1info.get("r2").get(StorageType.ARCHIVE));
    assertEquals(1, (int) d1info.get("r3").get(StorageType.SSD));
    // Verify /l1/d3, which only exists because of the newly added nodes.
    DFSTopologyNodeImpl d3 = (DFSTopologyNodeImpl) CLUSTER.getNode("/l1/d3");
    HashMap<String, EnumMap<StorageType, Integer>> d3info = d3.getChildrenStorageInfo();
    assertEquals(1, d3info.keySet().size());
    assertTrue(d3info.get("r3").size() == 1);
    assertEquals(2, (int) d3info.get("r3").get(StorageType.SSD));
    // Verify the aggregated counts at the /l1 level.
    DFSTopologyNodeImpl l1 = (DFSTopologyNodeImpl) CLUSTER.getNode("/l1");
    HashMap<String, EnumMap<StorageType, Integer>> l1info = l1.getChildrenStorageInfo();
    assertEquals(3, l1info.keySet().size());
    assertTrue(l1info.get("d1").size() == 3 && l1info.get("d2").size() == 3 && l1info.get("d3").size() == 1);
    assertEquals(4, (int) l1info.get("d1").get(StorageType.DISK));
    assertEquals(2, (int) l1info.get("d1").get(StorageType.ARCHIVE));
    assertEquals(1, (int) l1info.get("d1").get(StorageType.SSD));
    assertEquals(1, (int) l1info.get("d2").get(StorageType.SSD));
    assertEquals(1, (int) l1info.get("d2").get(StorageType.RAM_DISK));
    assertEquals(1, (int) l1info.get("d2").get(StorageType.DISK));
    assertEquals(2, (int) l1info.get("d3").get(StorageType.SSD));
    // Remove the four nodes again and verify the topology rolls back.
    for (int i = 0; i < 4; i++) {
        CLUSTER.remove(newDD[i]);
    }
    // /d1/r3 should've been out, /d1/r1 should've been resumed
    DFSTopologyNodeImpl nd1 = (DFSTopologyNodeImpl) CLUSTER.getNode("/l1/d1");
    HashMap<String, EnumMap<StorageType, Integer>> nd1info = nd1.getChildrenStorageInfo();
    assertEquals(2, nd1info.keySet().size());
    assertTrue(nd1info.get("r1").size() == 2 && nd1info.get("r2").size() == 2);
    assertEquals(1, (int) nd1info.get("r1").get(StorageType.DISK));
    assertEquals(1, (int) nd1info.get("r1").get(StorageType.ARCHIVE));
    assertEquals(2, (int) nd1info.get("r2").get(StorageType.DISK));
    assertEquals(1, (int) nd1info.get("r2").get(StorageType.ARCHIVE));
    // /l1/d3 should've been out, and /l1/d1 should've been resumed
    DFSTopologyNodeImpl nl1 = (DFSTopologyNodeImpl) CLUSTER.getNode("/l1");
    HashMap<String, EnumMap<StorageType, Integer>> nl1info = nl1.getChildrenStorageInfo();
    assertEquals(2, nl1info.keySet().size());
    // FIX: the following assertions previously read the stale pre-removal map
    // l1info; they must check the freshly fetched post-removal map nl1info
    // (the original only passed if getChildrenStorageInfo() returned a live view).
    assertTrue(nl1info.get("d1").size() == 2 && nl1info.get("d2").size() == 3);
    assertEquals(2, (int) nl1info.get("d1").get(StorageType.ARCHIVE));
    assertEquals(3, (int) nl1info.get("d1").get(StorageType.DISK));
    assertEquals(1, (int) nl1info.get("d2").get(StorageType.DISK));
    assertEquals(1, (int) nl1info.get("d2").get(StorageType.RAM_DISK));
    assertEquals(1, (int) nl1info.get("d2").get(StorageType.SSD));
    assertNull(CLUSTER.getNode("/l1/d3"));
}
Also used : DatanodeDescriptor(org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor) StorageType(org.apache.hadoop.fs.StorageType) DatanodeStorageInfo(org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo) EnumMap(java.util.EnumMap) Test(org.junit.Test)

Example 50 with EnumMap

Use of java.util.EnumMap in the OpenDataKit Collect project.

The class QRCodeUtils, method decodeFromBitmap.

public static String decodeFromBitmap(Bitmap bitmap) throws DataFormatException, IOException, FormatException, ChecksumException, NotFoundException {
    // Decoding hints for ZXing: search exhaustively, restrict to QR codes,
    // and do not assume the image is a pure barcode with no surroundings.
    Map<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
    hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
    hints.put(DecodeHintType.POSSIBLE_FORMATS, BarcodeFormat.QR_CODE);
    hints.put(DecodeHintType.PURE_BARCODE, Boolean.FALSE);

    Reader qrReader = new QRCodeMultiReader();
    Result decoded = qrReader.decode(getBinaryBitmap(bitmap), hints);
    // The QR payload is stored compressed; inflate it back to the original text.
    return CompressionUtils.decompress(decoded.getText());
}
Also used : QRCodeMultiReader(com.google.zxing.multi.qrcode.QRCodeMultiReader) DecodeHintType(com.google.zxing.DecodeHintType) QRCodeMultiReader(com.google.zxing.multi.qrcode.QRCodeMultiReader) Reader(com.google.zxing.Reader) EnumMap(java.util.EnumMap) Result(com.google.zxing.Result)

Aggregations

EnumMap (java.util.EnumMap)389 Map (java.util.Map)73 ArrayList (java.util.ArrayList)70 List (java.util.List)61 HashMap (java.util.HashMap)60 Test (org.junit.Test)46 IOException (java.io.IOException)38 Collection (java.util.Collection)35 DecodeHintType (com.google.zxing.DecodeHintType)30 HashSet (java.util.HashSet)26 Set (java.util.Set)26 EncodeHintType (com.google.zxing.EncodeHintType)17 File (java.io.File)17 BitMatrix (com.google.zxing.common.BitMatrix)15 BookID (biblemulticonverter.data.BookID)14 Iterator (java.util.Iterator)14 URL (java.net.URL)12 TreeMap (java.util.TreeMap)12 Header (com.jsql.model.bean.util.Header)10 Request (com.jsql.model.bean.util.Request)10