Search in sources :

Example 16 with DataOutputStream

Use of java.io.DataOutputStream in the Apache HBase project.

Class TestPrefixTreeEncoding, method testSeekWithRandomData.

@Test
public void testSeekWithRandomData() throws Exception {
    // Encode a batch of randomly generated cells with the prefix-tree codec,
    // then verify that a seeker over the encoded block can locate them.
    PrefixTreeCodec codec = new PrefixTreeCodec();
    int batchId = numBatchesWritten++;
    ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream();
    DataOutputStream encodingStream = new DataOutputStream(encodedBytes);
    HFileContext fileContext = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(false)
        .withIncludesTags(includesTag)
        .withCompression(Algorithm.NONE)
        .build();
    HFileBlockEncodingContext encodingContext =
        new HFileBlockDefaultEncodingContext(DataBlockEncoding.PREFIX_TREE, new byte[0], fileContext);
    generateRandomTestData(kvset, batchId, includesTag, codec, encodingContext, encodingStream);
    EncodedSeeker seeker =
        codec.createSeeker(CellComparator.COMPARATOR, codec.newDataBlockDecodingContext(fileContext));
    byte[] onDiskBytes = encodedBytes.toByteArray();
    // Skip the leading data-block-encoding id before handing the buffer to the seeker.
    ByteBuffer readBuffer =
        ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    verifySeeking(seeker, readBuffer, batchId);
}
Also used : PrefixTreeCodec(org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec) EncodedSeeker(org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker) DataOutputStream(java.io.DataOutputStream) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ByteBuffer(java.nio.ByteBuffer) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 17 with DataOutputStream

Use of java.io.DataOutputStream in the Facebook Buck project.

Class DirArtifactCache, method store.

@Override
public ListenableFuture<Void> store(ArtifactInfo info, BorrowablePath output) {
    // Stores the artifact (plus a ".metadata" sidecar file) under every rule key
    // in info, then trims the cache once enough bytes have accumulated since the
    // last cleanup. Failures are logged, never propagated to the caller.
    if (!doStore) {
        return Futures.immediateFuture(null);
    }
    try {
        // Remembers the first location a borrowed output was moved to, so that
        // subsequent rule keys copy from it instead of from the now-moved output.
        Optional<Path> borrowedAndStoredArtifactPath = Optional.empty();
        for (RuleKey ruleKey : info.getRuleKeys()) {
            Path artifactPath = getPathForRuleKey(ruleKey, Optional.empty());
            Path metadataPath = getPathForRuleKey(ruleKey, Optional.of(".metadata"));
            // Skip rule keys whose artifact and metadata are already both stored.
            if (filesystem.exists(artifactPath) && filesystem.exists(metadataPath)) {
                continue;
            }
            filesystem.mkdirs(getParentDirForRuleKey(ruleKey));
            if (!output.canBorrow()) {
                storeArtifactOutput(output.getPath(), artifactPath);
            } else {
                // move it without copying.  This significantly optimizes the Disk I/O.
                if (!borrowedAndStoredArtifactPath.isPresent()) {
                    borrowedAndStoredArtifactPath = Optional.of(artifactPath);
                    filesystem.move(output.getPath(), artifactPath, StandardCopyOption.REPLACE_EXISTING);
                } else {
                    // Output was already moved for an earlier rule key; copy from
                    // the first stored location instead.
                    storeArtifactOutput(borrowedAndStoredArtifactPath.get(), artifactPath);
                }
            }
            bytesSinceLastDeleteOldFiles += filesystem.getFileSize(artifactPath);
            // Now, write the meta data artifact.
            // Write to a temp file, then move into place, so readers never
            // observe a partially written metadata file.
            Path tmp = filesystem.createTempFile(getPreparedTempFolder(), "metadata", TMP_EXTENSION);
            try {
                try (DataOutputStream out = new DataOutputStream(filesystem.newFileOutputStream(tmp))) {
                    // Format: entry count, then per entry a writeUTF key followed
                    // by a length-prefixed UTF-8 value.
                    out.writeInt(info.getMetadata().size());
                    for (Map.Entry<String, String> ent : info.getMetadata().entrySet()) {
                        out.writeUTF(ent.getKey());
                        byte[] val = ent.getValue().getBytes(Charsets.UTF_8);
                        out.writeInt(val.length);
                        out.write(val);
                    }
                }
                filesystem.move(tmp, metadataPath, StandardCopyOption.REPLACE_EXISTING);
                bytesSinceLastDeleteOldFiles += filesystem.getFileSize(metadataPath);
            } finally {
                // Removes the temp file when the move above did not happen.
                filesystem.deleteFileAtPathIfExists(tmp);
            }
        }
    } catch (IOException e) {
        // Cache stores are best-effort: log and continue rather than fail the caller.
        LOG.warn(e, "Artifact store(%s, %s) error", info.getRuleKeys(), output);
    }
    // Trigger cleanup once stored bytes exceed the configured fraction of the max size.
    if (maxCacheSizeBytes.isPresent() && bytesSinceLastDeleteOldFiles > (maxCacheSizeBytes.get() * STORED_TO_MAX_BYTES_RATIO_TRIM_TRIGGER)) {
        bytesSinceLastDeleteOldFiles = 0L;
        deleteOldFiles();
    }
    return Futures.immediateFuture(null);
}
Also used : Path(java.nio.file.Path) BorrowablePath(com.facebook.buck.io.BorrowablePath) LazyPath(com.facebook.buck.io.LazyPath) RuleKey(com.facebook.buck.rules.RuleKey) DataOutputStream(java.io.DataOutputStream) IOException(java.io.IOException) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap)

Example 18 with DataOutputStream

Use of java.io.DataOutputStream in the Facebook Buck project.

Class BlockingHttpEndpoint, method send.

@VisibleForTesting
HttpResponse send(final HttpURLConnection connection, final String content) throws IOException {
    // Writes content as the request body, then reads and returns the full response.
    // The connection is always disconnected, even on failure.
    try {
        try (DataOutputStream out = new DataOutputStream(connection.getOutputStream())) {
            // Encode as UTF-8 explicitly: the original used writeBytes(), which
            // truncates each char to its low 8 bits and corrupts non-Latin-1 text,
            // while the response below is decoded as UTF-8. Closing the stream
            // (via try-with-resources) flushes it, so no explicit flush is needed.
            out.write(content.getBytes(Charsets.UTF_8));
        }
        // Close the response stream as well; the original leaked it and relied
        // solely on disconnect() for cleanup.
        try (InputStream inputStream = connection.getInputStream()) {
            String response = CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8));
            return new HttpResponse(response);
        }
    } finally {
        connection.disconnect();
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) DataOutputStream(java.io.DataOutputStream) InputStream(java.io.InputStream) VisibleForTesting(com.google.common.annotations.VisibleForTesting)

Example 19 with DataOutputStream

Use of java.io.DataOutputStream in the LinkedIn Pinot project.

Class DataSchema, method toBytes.

@Nonnull
public byte[] toBytes() throws IOException {
    // Serializes the schema as: column count, then each column name, then each
    // column type name, every string written as a length-prefixed UTF-8 byte array.
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
    // Write the number of columns.
    dataOutputStream.writeInt(_columnNames.length);
    // Write the column names.
    for (String columnName : _columnNames) {
        writeLengthPrefixed(dataOutputStream, columnName);
    }
    // Write the column types.
    for (FieldSpec.DataType columnType : _columnTypes) {
        // We don't want to use ordinal of the enum since adding a new data type will break things if server and broker
        // use different versions of DataType class.
        writeLengthPrefixed(dataOutputStream, columnType.name());
    }
    return byteArrayOutputStream.toByteArray();
}

// Writes the UTF-8 bytes of value prefixed with their length, the common
// pattern previously duplicated across both loops above.
private static void writeLengthPrefixed(DataOutputStream out, String value) throws IOException {
    byte[] bytes = value.getBytes(UTF_8);
    out.writeInt(bytes.length);
    out.write(bytes);
}
Also used : DataOutputStream(java.io.DataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) FieldSpec(com.linkedin.pinot.common.data.FieldSpec) Nonnull(javax.annotation.Nonnull)

Example 20 with DataOutputStream

Use of java.io.DataOutputStream in the LinkedIn Pinot project.

Class DataTableImplV2, method serializeDictionaryMap.

private byte[] serializeDictionaryMap() throws IOException {
    // Layout: number of dictionaries; then per column a length-prefixed UTF-8
    // column name, the entry count, and per entry an int key followed by a
    // length-prefixed UTF-8 value.
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
    dataOutputStream.writeInt(_dictionaryMap.size());
    for (Entry<String, Map<Integer, String>> dictionaryMapEntry : _dictionaryMap.entrySet()) {
        writeLengthPrefixedString(dataOutputStream, dictionaryMapEntry.getKey());
        Map<Integer, String> dictionary = dictionaryMapEntry.getValue();
        dataOutputStream.writeInt(dictionary.size());
        for (Entry<Integer, String> dictionaryEntry : dictionary.entrySet()) {
            dataOutputStream.writeInt(dictionaryEntry.getKey());
            writeLengthPrefixedString(dataOutputStream, dictionaryEntry.getValue());
        }
    }
    return byteArrayOutputStream.toByteArray();
}

// Writes the UTF-8 bytes of value prefixed with their length, a pattern the
// original duplicated for both column names and dictionary values.
private static void writeLengthPrefixedString(DataOutputStream out, String value) throws IOException {
    byte[] bytes = value.getBytes(UTF_8);
    out.writeInt(bytes.length);
    out.write(bytes);
}
Also used : DataOutputStream(java.io.DataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) HashMap(java.util.HashMap) Map(java.util.Map)

Aggregations

DataOutputStream (java.io.DataOutputStream)2957 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1309 IOException (java.io.IOException)1019 Test (org.junit.Test)633 DataInputStream (java.io.DataInputStream)611 FileOutputStream (java.io.FileOutputStream)426 ByteArrayInputStream (java.io.ByteArrayInputStream)409 File (java.io.File)279 BufferedOutputStream (java.io.BufferedOutputStream)227 UnitTest (org.apache.geode.test.junit.categories.UnitTest)172 URL (java.net.URL)149 InputStreamReader (java.io.InputStreamReader)144 BufferedReader (java.io.BufferedReader)140 Path (org.apache.hadoop.fs.Path)137 DataInput (java.io.DataInput)124 ArrayList (java.util.ArrayList)122 HttpURLConnection (java.net.HttpURLConnection)121 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)117 FileInputStream (java.io.FileInputStream)107 InputStream (java.io.InputStream)107