
Example 1 with ByteArrayDataOutput

use of com.google.common.io.ByteArrayDataOutput in project hadoop by apache.

the class BlockReaderFactory method tryToCreateExternalBlockReader.

private BlockReader tryToCreateExternalBlockReader() {
    List<Class<? extends ReplicaAccessorBuilder>> clses = conf.getReplicaAccessorBuilderClasses();
    for (Class<? extends ReplicaAccessorBuilder> cls : clses) {
        try {
            ByteArrayDataOutput bado = ByteStreams.newDataOutput();
            token.write(bado);
            byte[] tokenBytes = bado.toByteArray();
            Constructor<? extends ReplicaAccessorBuilder> ctor = cls.getConstructor();
            ReplicaAccessorBuilder builder = ctor.newInstance();
            long visibleLength = startOffset + length;
            ReplicaAccessor accessor = builder
                    .setAllowShortCircuitReads(allowShortCircuitLocalReads)
                    .setBlock(block.getBlockId(), block.getBlockPoolId())
                    .setGenerationStamp(block.getGenerationStamp())
                    .setBlockAccessToken(tokenBytes)
                    .setClientName(clientName)
                    .setConfiguration(configuration)
                    .setFileName(fileName)
                    .setVerifyChecksum(verifyChecksum)
                    .setVisibleLength(visibleLength)
                    .build();
            if (accessor == null) {
                LOG.trace("{}: No ReplicaAccessor created by {}", this, cls.getName());
            } else {
                return new ExternalBlockReader(accessor, visibleLength, startOffset);
            }
        } catch (Throwable t) {
            LOG.warn("Failed to construct new object of type " + cls.getName(), t);
        }
    }
    return null;
}
Also used : ReplicaAccessorBuilder (org.apache.hadoop.hdfs.ReplicaAccessorBuilder), ReplicaAccessor (org.apache.hadoop.hdfs.ReplicaAccessor), ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput)
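
For reference, a minimal sketch of the reverse operation (not part of the Hadoop source above): since Token implements Writable, the bytes produced by token.write(bado) can be read back through the DataInput view that ByteStreams provides. The method name parseToken is hypothetical.

import com.google.common.io.ByteStreams;
import java.io.IOException;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.security.token.Token;

// Rebuild the block access token from the serialized form produced above.
static Token<BlockTokenIdentifier> parseToken(byte[] tokenBytes) throws IOException {
    Token<BlockTokenIdentifier> parsed = new Token<>();
    parsed.readFields(ByteStreams.newDataInput(tokenBytes));
    return parsed;
}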

Example 2 with ByteArrayDataOutput

use of com.google.common.io.ByteArrayDataOutput in project druid by druid-io.

the class StringArrayWritable method toBytes.

public static final byte[] toBytes(final InputRow row, AggregatorFactory[] aggs, boolean reportParseExceptions) {
    try {
        ByteArrayDataOutput out = ByteStreams.newDataOutput();
        // write the timestamp
        out.writeLong(row.getTimestampFromEpoch());
        // write all dimensions; check for null before dereferencing dimList
        List<String> dimList = row.getDimensions();
        WritableUtils.writeVInt(out, dimList == null ? 0 : dimList.size());
        if (dimList != null) {
            for (String dim : dimList) {
                List<String> dimValues = row.getDimension(dim);
                writeString(dim, out);
                writeStringArray(dimValues, out);
            }
        }
        // write all metrics; Supplier is a single-method interface, so a lambda suffices
        Supplier<InputRow> supplier = () -> row;
        WritableUtils.writeVInt(out, aggs.length);
        for (AggregatorFactory aggFactory : aggs) {
            String k = aggFactory.getName();
            writeString(k, out);
            Aggregator agg = aggFactory.factorize(IncrementalIndex.makeColumnSelectorFactory(VirtualColumns.EMPTY, aggFactory, supplier, true));
            try {
                agg.aggregate();
            } catch (ParseException e) {
                // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
                if (reportParseExceptions) {
                    throw new ParseException(e, "Encountered parse error for aggregator[%s]", k);
                }
                log.debug(e, "Encountered parse error, skipping aggregator[%s].", k);
            }
            String t = aggFactory.getTypeName();
            if (t.equals("float")) {
                out.writeFloat(agg.getFloat());
            } else if (t.equals("long")) {
                WritableUtils.writeVLong(out, agg.getLong());
            } else {
                // it's a complex metric
                Object val = agg.get();
                ComplexMetricSerde serde = getComplexMetricSerde(t);
                writeBytes(serde.toBytes(val), out);
            }
        }
        return out.toByteArray();
    } catch (IOException ex) {
        throw Throwables.propagate(ex);
    }
}
Also used : ComplexMetricSerde (io.druid.segment.serde.ComplexMetricSerde), ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), InputRow (io.druid.data.input.InputRow), Aggregator (io.druid.query.aggregation.Aggregator), Supplier (com.google.common.base.Supplier), ParseException (io.druid.java.util.common.parsers.ParseException), IOException (java.io.IOException), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory)
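
The matching read path is project-private, but a sketch of how the header of this layout would be consumed looks like the following; readRowHeader is a hypothetical name, and the per-dimension and per-metric decoding would mirror the writeString/writeStringArray/writeBytes helpers, whose wire format is not shown here.

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

// bytes: the array returned by toBytes(...) above
static void readRowHeader(byte[] bytes) throws IOException {
    ByteArrayDataInput in = ByteStreams.newDataInput(bytes);
    long timestamp = in.readLong();            // written by out.writeLong(...)
    int dimCount = WritableUtils.readVInt(in); // written by WritableUtils.writeVInt(...)
    // each dimension then follows as a name/values pair, mirroring the
    // project-private writeString(...) and writeStringArray(...) helpers
}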

Example 3 with ByteArrayDataOutput

use of com.google.common.io.ByteArrayDataOutput in project MinecraftForge by MinecraftForge.

the class GenDiffSet method main.

public static void main(String[] args) throws IOException {
    // Clean vanilla jar: minecraft.jar or minecraft_server.jar
    String sourceJar = args[0];
    // Directory containing obfuscated output classes, typically mcp/reobf/minecraft
    String targetDir = args[1];
    // Path to FML's deobfuscation_data.lzma
    String deobfData = args[2];
    // Path in which to place the generated .binpatch files
    String outputDir = args[3];
    // "true" if we should delete the target file once a .binpatch has been generated successfully
    String killTarget = args[4];
    LogManager.getLogger("GENDIFF").log(Level.INFO, String.format("Creating patches at %s for %s from %s", outputDir, sourceJar, targetDir));
    Delta delta = new Delta();
    FMLDeobfuscatingRemapper remapper = FMLDeobfuscatingRemapper.INSTANCE;
    remapper.setupLoadOnly(deobfData, false);
    JarFile sourceZip = new JarFile(sourceJar);
    boolean kill = killTarget.equalsIgnoreCase("true");
    File f = new File(outputDir);
    f.mkdirs();
    for (String name : remapper.getObfedClasses()) {
        String fileName = name;
        String jarName = name;
        if (RESERVED_NAMES.contains(name.toUpperCase(Locale.ENGLISH))) {
            fileName = "_" + name;
        }
        File targetFile = new File(targetDir, fileName.replace('/', File.separatorChar) + ".class");
        jarName = jarName + ".class";
        if (targetFile.exists()) {
            String sourceClassName = name.replace('/', '.');
            String targetClassName = remapper.map(name).replace('/', '.');
            JarEntry entry = sourceZip.getJarEntry(jarName);
            byte[] vanillaBytes = toByteArray(sourceZip, entry);
            byte[] patchedBytes = Files.toByteArray(targetFile);
            byte[] diff = delta.compute(vanillaBytes, patchedBytes);
            ByteArrayDataOutput diffOut = ByteStreams.newDataOutput(diff.length + 50);
            // Original name
            diffOut.writeUTF(name);
            // Source name
            diffOut.writeUTF(sourceClassName);
            // Target name
            diffOut.writeUTF(targetClassName);
            // exists at original
            diffOut.writeBoolean(entry != null);
            if (entry != null) {
                diffOut.writeInt(Hashing.adler32().hashBytes(vanillaBytes).asInt());
            }
            // length of patch
            diffOut.writeInt(diff.length);
            // patch
            diffOut.write(diff);
            File target = new File(outputDir, targetClassName + ".binpatch");
            target.getParentFile().mkdirs();
            Files.write(diffOut.toByteArray(), target);
            Logger.getLogger("GENDIFF").info(String.format("Wrote patch for %s (%s) at %s", name, targetClassName, target.getAbsolutePath()));
            if (kill) {
                targetFile.delete();
                Logger.getLogger("GENDIFF").info(String.format("  Deleted target: %s", targetFile.toString()));
            }
        }
    }
    sourceZip.close();
}
Also used : FMLDeobfuscatingRemapper (net.minecraftforge.fml.common.asm.transformers.deobf.FMLDeobfuscatingRemapper), Delta (net.minecraftforge.fml.repackage.com.nothome.delta.Delta), ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput), JarFile (java.util.jar.JarFile), JarEntry (java.util.jar.JarEntry), File (java.io.File)
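
A hypothetical reader for the .binpatch format written above, following the exact field order of the example (original name, source name, target name, existence flag, optional Adler-32 checksum, patch length, patch bytes); readBinPatch and patchFile are assumed names, not taken from the Forge source.

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

static void readBinPatch(File patchFile) throws IOException {
    ByteArrayDataInput in = ByteStreams.newDataInput(Files.toByteArray(patchFile));
    String name = in.readUTF();             // original name
    String sourceClassName = in.readUTF();  // source name
    String targetClassName = in.readUTF();  // target name
    boolean existedInVanilla = in.readBoolean();
    int checksum = existedInVanilla ? in.readInt() : 0; // Adler-32 of vanilla bytes
    byte[] diff = new byte[in.readInt()];   // length of patch
    in.readFully(diff);                     // patch body
}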

Example 4 with ByteArrayDataOutput

use of com.google.common.io.ByteArrayDataOutput in project cdap by caskdata.

the class AbstractStreamFileConsumer method claimEntry.

/**
   * Try to claim a stream event offset.
   *
   * @return The row key for writing to the state table if successfully claimed or {@code null} if not claimed.
   */
private byte[] claimEntry(StreamFileOffset offset, byte[] claimedStateContent) throws IOException {
    ByteArrayDataOutput out = ByteStreams.newDataOutput(50);
    out.writeLong(consumerConfig.getGroupId());
    StreamUtils.encodeOffset(out, offset);
    byte[] row = out.toByteArray();
    SortedMap<byte[], byte[]> rowStates = getInitRowStates(row);
    // See if the entry should be ignored. If it is in the rowStates with null value, then it should be ignored.
    byte[] rowState = rowStates.get(row);
    if (rowStates.containsKey(row) && rowState == null) {
        return null;
    }
    // Only need to claim entry if FIFO and group size > 1
    if (consumerConfig.getDequeueStrategy() == DequeueStrategy.FIFO && consumerConfig.getGroupSize() > 1) {
        return claimFifoEntry(row, claimedStateContent, rowState) ? row : null;
    }
    // No explicit claim is needed for the other strategies, as it's already handled by the readFilter
    return row;
}
Also used : ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput)
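
Decoding such a row key is a matter of reversing the writes. A sketch, assuming the layout above (an 8-byte group id followed by the StreamUtils-encoded offset); readGroupId is a hypothetical helper:

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;

static long readGroupId(byte[] row) {
    ByteArrayDataInput in = ByteStreams.newDataInput(row);
    long groupId = in.readLong();
    // the remainder of "in" holds the encoded offset; decoding it would need
    // a StreamUtils counterpart to encodeOffset (assumed, not shown above)
    return groupId;
}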

Example 5 with ByteArrayDataOutput

use of com.google.common.io.ByteArrayDataOutput in project hbase by apache.

the class Mutation method setClusterIds.

/**
   * Marks that the clusters with the given clusterIds have consumed the mutation
   * @param clusterIds of the clusters that have consumed the mutation
   */
public Mutation setClusterIds(List<UUID> clusterIds) {
    ByteArrayDataOutput out = ByteStreams.newDataOutput();
    out.writeInt(clusterIds.size());
    for (UUID clusterId : clusterIds) {
        out.writeLong(clusterId.getMostSignificantBits());
        out.writeLong(clusterId.getLeastSignificantBits());
    }
    setAttribute(CONSUMED_CLUSTER_IDS, out.toByteArray());
    return this;
}
Also used : ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput), UUID (java.util.UUID)
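
Mutation stores the ids as a packed attribute. A minimal sketch of decoding that attribute by hand, as a hypothetical companion method inside Mutation (HBase's own getClusterIds() performs the equivalent parse, so this loop is illustrative rather than required):

import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

public List<UUID> readConsumedClusterIds() {
    byte[] bytes = getAttribute(CONSUMED_CLUSTER_IDS);
    if (bytes == null) {
        return new ArrayList<>(); // attribute never set
    }
    ByteArrayDataInput in = ByteStreams.newDataInput(bytes);
    int count = in.readInt();
    List<UUID> clusterIds = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        // most-significant bits first, matching the write order above
        clusterIds.add(new UUID(in.readLong(), in.readLong()));
    }
    return clusterIds;
}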

Aggregations

ByteArrayDataOutput (com.google.common.io.ByteArrayDataOutput): 20
ConnectorTableHandle (com.facebook.presto.spi.ConnectorTableHandle): 2
IOException (java.io.IOException): 2
Test (org.testng.annotations.Test): 2
Split (co.cask.cdap.api.data.batch.Split): 1
RaptorColumnHandle (com.facebook.presto.raptor.RaptorColumnHandle): 1
RaptorColumnIdentity (com.facebook.presto.raptor.RaptorColumnIdentity): 1
RaptorTableIdentity (com.facebook.presto.raptor.RaptorTableIdentity): 1
ColumnHandle (com.facebook.presto.spi.ColumnHandle): 1
ColumnIdentity (com.facebook.presto.spi.ColumnIdentity): 1
TableIdentity (com.facebook.presto.spi.TableIdentity): 1
JsonValue (com.fasterxml.jackson.annotation.JsonValue): 1
Supplier (com.google.common.base.Supplier): 1
ByteString (com.google.protobuf.ByteString): 1
ServiceException (com.google.protobuf.ServiceException): 1
InputRow (io.druid.data.input.InputRow): 1
MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 1
ParseException (io.druid.java.util.common.parsers.ParseException): 1
Aggregator (io.druid.query.aggregation.Aggregator): 1
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 1