Example 61 with HeapDataOutputStream

use of org.apache.geode.internal.HeapDataOutputStream in project geode by apache.

the class PdxAttributesJUnitTest method defineAType.

private void defineAType() throws IOException {
    SimpleClass sc = new SimpleClass(1, (byte) 2);
    HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
    DataSerializer.writeObject(sc, out);
}
Also used : HeapDataOutputStream(org.apache.geode.internal.HeapDataOutputStream)
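For reference, a full round trip reads those bytes back with DataSerializer.readObject. The following is a minimal sketch (not part of the original test), assuming a cache with PDX configured is already running so the PDX type of SimpleClass can be resolved on read:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.geode.DataSerializer;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.Version;

// Sketch: serialize a value into a HeapDataOutputStream, then deserialize the same bytes.
private SimpleClass roundTrip(SimpleClass original) throws IOException, ClassNotFoundException {
    HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
    DataSerializer.writeObject(original, out);
    // toByteArray() exposes the bytes accumulated in the heap buffer
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(out.toByteArray()));
    return DataSerializer.readObject(in);
}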

Example 62 with HeapDataOutputStream

use of org.apache.geode.internal.HeapDataOutputStream in project geode by apache.

the class PdxClientServerDUnitTest method testMultipleServerDSes.

/**
   * Test to make sure that types are sent to all pools, even if they are in multiple distributed
   * systems.
   */
@Test
public void testMultipleServerDSes() throws Exception {
    Host host = Host.getHost(0);
    final VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    final int port1 = createLonerServerRegion(vm0, "region1", "1");
    final int port2 = createLonerServerRegion(vm1, "region2", "2");
    SerializableCallable createRegion = new SerializableCallable() {

        public Object call() throws Exception {
            Properties props = new Properties();
            props.setProperty(MCAST_PORT, "0");
            props.setProperty(LOCATORS, "");
            getSystem(props);
            Cache cache = getCache();
            PoolFactory pf = PoolManager.createFactory();
            pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port1);
            pf.create("pool1");
            pf = PoolManager.createFactory();
            pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port2);
            pf.create("pool2");
            AttributesFactory af = new AttributesFactory();
            af.setPoolName("pool1");
            cache.createRegion("region1", af.create());
            af = new AttributesFactory();
            af.setPoolName("pool2");
            cache.createRegion("region2", af.create());
            return null;
        }
    };
    vm2.invoke(createRegion);
    vm3.invoke(createRegion);
    createRegion.call();
    // Serialize an object and put it in both regions, sending
    // the event to each pool
    vm2.invoke(() -> {
        HeapDataOutputStream bytes = new HeapDataOutputStream(Version.CURRENT);
        Region r1 = getRootRegion("region1");
        r1.put(1, new SimpleClass(57, (byte) 3));
        Region r2 = getRootRegion("region2");
        r2.put(1, new SimpleClass(57, (byte) 3));
        return null;
    });
    // Make sure we can deserialize the value in a different client
    vm3.invoke(() -> {
        Region r = getRootRegion("region1");
        assertEquals(new SimpleClass(57, (byte) 3), r.get(1));
        return null;
    });
    // Make sure we can get the entry in the current member
    Region r = getRootRegion("region2");
    assertEquals(new SimpleClass(57, (byte) 3), r.get(1));
}
Also used : PoolFactory(org.apache.geode.cache.client.PoolFactory) AttributesFactory(org.apache.geode.cache.AttributesFactory) HeapDataOutputStream(org.apache.geode.internal.HeapDataOutputStream) VM(org.apache.geode.test.dunit.VM) SerializableCallable(org.apache.geode.test.dunit.SerializableCallable) Region(org.apache.geode.cache.Region) Host(org.apache.geode.test.dunit.Host) Properties(java.util.Properties) Cache(org.apache.geode.cache.Cache) ClientCache(org.apache.geode.cache.client.ClientCache) Test(org.junit.Test) SerializationTest(org.apache.geode.test.junit.categories.SerializationTest) DistributedTest(org.apache.geode.test.junit.categories.DistributedTest)
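Outside of the DUnit harness, the two-pool client that this test builds in createRegion looks roughly like the sketch below. The host names, ports, and region shortcut are illustrative placeholders, and SimpleClass is the PDX value class used by the test:

import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.client.PoolFactory;
import org.apache.geode.cache.client.PoolManager;

// Sketch: one client cache, two pools pointing at servers in different distributed systems,
// and one proxy region bound to each pool. Host names and ports are placeholders.
public static void twoPoolClientSketch() {
    ClientCache cache = new ClientCacheFactory().create();

    PoolFactory pf = PoolManager.createFactory();
    pf.addServer("server1", 40404);
    pf.create("pool1");

    pf = PoolManager.createFactory();
    pf.addServer("server2", 40405);
    pf.create("pool2");

    Region<Integer, Object> region1 = cache.<Integer, Object>createClientRegionFactory(ClientRegionShortcut.PROXY)
        .setPoolName("pool1").create("region1");
    Region<Integer, Object> region2 = cache.<Integer, Object>createClientRegionFactory(ClientRegionShortcut.PROXY)
        .setPoolName("pool2").create("region2");

    // Putting a PDX-serializable value through each region registers its PDX type
    // with the servers behind that region's pool.
    region1.put(1, new SimpleClass(57, (byte) 3));
    region2.put(1, new SimpleClass(57, (byte) 3));
}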

Example 63 with HeapDataOutputStream

use of org.apache.geode.internal.HeapDataOutputStream in project geode by apache.

the class NonDelegatingLoader method loadClass.

@Override
public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    if (!name.contains("SeparateClassloaderPdx")) {
        return super.loadClass(name, resolve);
    }
    URL url = super.getResource(name.replace('.', File.separatorChar) + ".class");
    if (url == null) {
        throw new ClassNotFoundException();
    }
    HeapDataOutputStream hoas = new HeapDataOutputStream(Version.CURRENT);
    InputStream classStream;
    try {
        classStream = url.openStream();
        while (true) {
            byte[] chunk = new byte[1024];
            int read = classStream.read(chunk);
            if (read < 0) {
                break;
            }
            hoas.write(chunk, 0, read);
        }
    } catch (IOException e) {
        throw new ClassNotFoundException("Error reading class", e);
    }
    Class<?> clazz = defineClass(name, hoas.toByteBuffer(), null);
    if (resolve) {
        resolveClass(clazz);
    }
    return clazz;
}
Also used : HeapDataOutputStream(org.apache.geode.internal.HeapDataOutputStream) InputStream(java.io.InputStream) IOException(java.io.IOException) URL(java.net.URL)
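The read loop above is the usual way to buffer an arbitrary InputStream into a HeapDataOutputStream. Pulled out as a standalone helper (a sketch, not part of the loader), the same pattern looks like this:

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.Version;

// Sketch: drain an InputStream into a HeapDataOutputStream and return the
// accumulated bytes as a ByteBuffer (e.g. for ClassLoader.defineClass).
static ByteBuffer readFully(InputStream in) throws IOException {
    HeapDataOutputStream hdos = new HeapDataOutputStream(Version.CURRENT);
    byte[] chunk = new byte[1024];
    int read;
    while ((read = in.read(chunk)) != -1) {
        hdos.write(chunk, 0, read);
    }
    return hdos.toByteBuffer();
}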

Example 64 with HeapDataOutputStream

use of org.apache.geode.internal.HeapDataOutputStream in project geode by apache.

the class StructStreamingResultSender method send.

/**
   * Send the result in chunks. There are three types of chunk: TYPE, DATA, and ERROR.
   * A TYPE chunk carries the struct type info, a DATA chunk carries data, and an
   * ERROR chunk carries an exception. There is at most one TYPE chunk (omitted
   * for `KeyValueType`) and at most one ERROR chunk (sent only if an error occurs),
   * but usually there are multiple DATA chunks, each containing multiple rows of
   * data. The chunk size is determined by the constant `CHUNK_SIZE`. If an
   * exception is thrown, it is serialized and sent as the last chunk of the
   * result (as an ERROR chunk).
   */
public void send() {
    if (closed)
        throw new RuntimeException("sender is closed.");
    HeapDataOutputStream buf = new HeapDataOutputStream(CHUNK_SIZE + 2048, null);
    String dataType = null;
    int typeSize = 0;
    int rowCount = 0;
    int dataSize = 0;
    try {
        if (rows.hasNext()) {
            // Note: only send type info if there's data with it
            typeSize = sendType(buf);
            buf.writeByte(DATA_CHUNK);
            int rowSize = structType == null ? 2 : structType.getFieldNames().length;
            while (rows.hasNext()) {
                rowCount++;
                Object[] row = rows.next();
                if (rowCount < 2)
                    dataType = entryDataType(row);
                if (rowSize != row.length)
                    throw new IOException(rowToString("Expect " + rowSize + " columns, but got ", row));
                serializeRowToBuffer(row, buf);
                if (buf.size() > CHUNK_SIZE) {
                    dataSize += sendBufferredData(buf, false);
                    buf.writeByte(DATA_CHUNK);
                }
            }
        }
        // send last piece of data or empty byte array
        dataSize += sendBufferredData(buf, true);
        logger.info(desc + ": " + rowCount + " rows, type=" + dataType + ", type.size=" + typeSize + ", data.size=" + dataSize + ", row.avg.size=" + (rowCount == 0 ? "NaN" : String.format("%.1f", ((float) dataSize) / rowCount)));
    } catch (IOException | RuntimeException e) {
        sendException(buf, e);
    } finally {
        closed = true;
    }
}
Also used : HeapDataOutputStream(org.apache.geode.internal.HeapDataOutputStream) IOException(java.io.IOException)
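The core of send() is a buffer-and-flush pattern: rows are serialized into a HeapDataOutputStream and a chunk is emitted whenever the buffer grows past CHUNK_SIZE. Isolated from the TYPE/ERROR bookkeeping, that pattern is sketched below; the chunk size, the byte[] records, and the Consumer-based hand-off are illustrative assumptions rather than the project's actual sender API:

import java.io.IOException;
import java.util.Iterator;
import java.util.function.Consumer;
import org.apache.geode.internal.HeapDataOutputStream;

// Sketch of the chunking pattern: accumulate serialized records in a HeapDataOutputStream
// and hand off one chunk whenever the buffer exceeds the size threshold.
class ChunkedWriterSketch {

    private static final int CHUNK_SIZE = 8 * 1024; // placeholder threshold

    static void sendInChunks(Iterator<byte[]> records, Consumer<byte[]> sendChunk) throws IOException {
        HeapDataOutputStream buf = new HeapDataOutputStream(CHUNK_SIZE + 2048, null);
        while (records.hasNext()) {
            byte[] record = records.next();
            buf.write(record, 0, record.length);
            if (buf.size() > CHUNK_SIZE) {
                // flush the filled chunk and start a fresh buffer
                sendChunk.accept(buf.toByteArray());
                buf = new HeapDataOutputStream(CHUNK_SIZE + 2048, null);
            }
        }
        // send the last (possibly empty) chunk so the receiver always sees a final piece
        sendChunk.accept(buf.toByteArray());
    }
}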

Example 65 with HeapDataOutputStream

use of org.apache.geode.internal.HeapDataOutputStream in project geode by apache.

the class JSONUtils method formulateJsonForListQueriesCall.

public static String formulateJsonForListQueriesCall(Region<String, String> queryRegion) {
    HeapDataOutputStream outputStream = new HeapDataOutputStream(org.apache.geode.internal.Version.CURRENT);
    try {
        JsonGenerator generator = enableDisableJSONGeneratorFeature(getObjectMapper().getFactory().createGenerator((OutputStream) outputStream, JsonEncoding.UTF8));
        JsonWriter.writeQueryListAsJson(generator, "queries", queryRegion);
        generator.close();
        return new String(outputStream.toByteArray());
    } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        outputStream.close();
    }
}
Also used : HeapDataOutputStream(org.apache.geode.internal.HeapDataOutputStream) OutputStream(java.io.OutputStream) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) IOException(java.io.IOException)
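The same approach works for any ad-hoc JSON payload, since HeapDataOutputStream is itself an OutputStream that Jackson can write into. A minimal standalone sketch (the field name and value are placeholders):

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.geode.internal.HeapDataOutputStream;
import org.apache.geode.internal.Version;

// Sketch: write a small JSON object straight into a HeapDataOutputStream and
// decode the buffered bytes as UTF-8 (the encoding the generator was created with).
public static String writeStatusJson(String status) {
    HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
    try {
        JsonGenerator generator =
            new ObjectMapper().getFactory().createGenerator((OutputStream) out, JsonEncoding.UTF8);
        generator.writeStartObject();
        generator.writeStringField("status", status);
        generator.writeEndObject();
        generator.close();
        return new String(out.toByteArray(), StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        out.close();
    }
}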

Aggregations

HeapDataOutputStream (org.apache.geode.internal.HeapDataOutputStream): 134 usages
Test (org.junit.Test): 55 usages
IOException (java.io.IOException): 40 usages
IntegrationTest (org.apache.geode.test.junit.categories.IntegrationTest): 36 usages
SerializationTest (org.apache.geode.test.junit.categories.SerializationTest): 33 usages
DataInputStream (java.io.DataInputStream): 29 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 23 usages
UnitTest (org.apache.geode.test.junit.categories.UnitTest): 15 usages
DiskAccessException (org.apache.geode.cache.DiskAccessException): 12 usages
InternalDistributedMember (org.apache.geode.distributed.internal.membership.InternalDistributedMember): 11 usages
PdxSerializerObject (org.apache.geode.internal.PdxSerializerObject): 10 usages
JsonGenerator (com.fasterxml.jackson.core.JsonGenerator): 8 usages
Version (org.apache.geode.internal.Version): 8 usages
DataInput (java.io.DataInput): 7 usages
SerializableCallable (org.apache.geode.test.dunit.SerializableCallable): 7 usages
OutputStream (java.io.OutputStream): 6 usages
Properties (java.util.Properties): 6 usages
ByteBuffer (java.nio.ByteBuffer): 5 usages
HashMap (java.util.HashMap): 5 usages
InternalGemFireException (org.apache.geode.InternalGemFireException): 5 usages