Search in sources:

Example 21 with FileChannel

Use of java.nio.channels.FileChannel in project hbase by apache.

The class TestBufferChain, method testWithSpy.

/**
 * Verifies that {@code BufferChain#write} honors the per-call byte limit while
 * draining multiple backing buffers: after each limited write, the file must
 * contain exactly the bytes written so far ("he", "hell", "hello w", "hello world").
 *
 * @throws IOException if writing to or reading back the temporary file fails
 */
@Test
public void testWithSpy() throws IOException {
    ByteBuffer[] bufs = new ByteBuffer[] { stringBuf("XXXhelloYYY", 3, 5), stringBuf(" ", 0, 1), stringBuf("XXXXworldY", 4, 5) };
    BufferChain chain = new BufferChain(bufs);
    // try-with-resources closes resources in reverse declaration order (channel,
    // then stream), matching the original explicit close order, and also closes
    // fos if Mockito.spy(...) throws — the old code leaked the stream in that case.
    try (FileOutputStream fos = new FileOutputStream(tmpFile);
        FileChannel ch = Mockito.spy(fos.getChannel())) {
        chain.write(ch, 2);
        assertEquals("he", Files.toString(tmpFile, Charsets.UTF_8));
        chain.write(ch, 2);
        assertEquals("hell", Files.toString(tmpFile, Charsets.UTF_8));
        chain.write(ch, 3);
        assertEquals("hello w", Files.toString(tmpFile, Charsets.UTF_8));
        chain.write(ch, 8);
        assertEquals("hello world", Files.toString(tmpFile, Charsets.UTF_8));
    }
}
Also used : FileChannel(java.nio.channels.FileChannel) FileOutputStream(java.io.FileOutputStream) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Example 22 with FileChannel

Use of java.nio.channels.FileChannel in project tomcat by apache.

The class ExpandWar, method copy.

/**
     * Copy the specified file or directory to the destination. Directories are
     * copied recursively; regular files are copied via NIO channel transfer.
     *
     * @param src File object representing the source
     * @param dest File object representing the destination
     * @return <code>true</code> if the copy was successful
     */
public static boolean copy(File src, File dest) {
    boolean result = true;
    String[] files = null;
    if (src.isDirectory()) {
        files = src.list();
        result = dest.mkdir();
    } else {
        // A single regular file: copy "src + ''" -> "dest + ''" below.
        files = new String[1];
        files[0] = "";
    }
    if (files == null) {
        // src.list() returns null on I/O error or if src vanished; nothing to copy.
        files = new String[0];
    }
    for (int i = 0; (i < files.length) && result; i++) {
        File fileSrc = new File(src, files[i]);
        File fileDest = new File(dest, files[i]);
        if (fileSrc.isDirectory()) {
            result = copy(fileSrc, fileDest);
        } else {
            try (FileChannel ic = (new FileInputStream(fileSrc)).getChannel();
                FileChannel oc = (new FileOutputStream(fileDest)).getChannel()) {
                // transferTo may transfer fewer bytes than requested in a single
                // call, so loop until the whole file has been transferred.
                long size = ic.size();
                long position = 0;
                while (position < size) {
                    position += ic.transferTo(position, size - position, oc);
                }
            } catch (IOException e) {
                log.error(sm.getString("expandWar.copy", fileSrc, fileDest), e);
                result = false;
            }
        }
    }
    return result;
}
Also used : FileChannel(java.nio.channels.FileChannel) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) JarFile(java.util.jar.JarFile) File(java.io.File) FileInputStream(java.io.FileInputStream)

Example 23 with FileChannel

Use of java.nio.channels.FileChannel in project cryptomator by cryptomator.

The class UpgradeVersion4to5, method migrate.

/**
 * Migrates a single encrypted file from vault format 4 to format 5, in place.
 *
 * Format 4 padded the ciphertext; format 5 stores the exact size. This method
 * truncates the ciphertext to the size implied by the header's cleartext size
 * (re-encrypting a partial last chunk when needed) and finally rewrites the
 * header with file size -1, which marks the file as migrated.
 *
 * The statement order is deliberate: the header is only rewritten after the
 * content was successfully truncated/re-encrypted, so an interrupted migration
 * can be retried.
 *
 * @param file the encrypted file to migrate
 * @param attrs attributes of {@code file}; only the on-disk size is consulted
 * @param cryptor cryptor used to decrypt/encrypt the header and the last chunk
 * @throws IOException if reading from or writing to {@code file} fails
 */
@SuppressWarnings("deprecation")
private void migrate(Path file, BasicFileAttributes attrs, Cryptor cryptor) throws IOException {
    LOG.info("Starting migration of {}...", file);
    try (FileChannel ch = FileChannel.open(file, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
        // read header:
        ByteBuffer headerBuf = ByteBuffer.allocate(cryptor.fileHeaderCryptor().headerSize());
        ch.read(headerBuf);
        headerBuf.flip();
        LOG.info("\tHeader read");
        FileHeader header = cryptor.fileHeaderCryptor().decryptHeader(headerBuf);
        long cleartextSize = header.getFilesize();
        if (cleartextSize < 0) {
            // A negative header size is the sentinel written at the end of this
            // method, i.e. the file was already migrated.
            LOG.info("\tSkipping already migrated file");
            return;
        } else if (cleartextSize > attrs.size()) {
            // Cleartext can never exceed the on-disk ciphertext size; the header
            // must be corrupt, so leave the file untouched.
            LOG.warn("\tSkipping file with invalid file size {}/{}", cleartextSize, attrs.size());
            return;
        }
        int headerSize = cryptor.fileHeaderCryptor().headerSize();
        int ciphertextChunkSize = cryptor.fileContentCryptor().ciphertextChunkSize();
        int cleartextChunkSize = cryptor.fileContentCryptor().cleartextChunkSize();
        long newCiphertextSize = Cryptors.ciphertextSize(cleartextSize, cryptor);
        long newEOF = headerSize + newCiphertextSize;
        // int-truncation
        long newFullChunks = newCiphertextSize / ciphertextChunkSize;
        long newAdditionalCiphertextBytes = newCiphertextSize % ciphertextChunkSize;
        if (newAdditionalCiphertextBytes == 0) {
            // (new) last block is already correct. just truncate:
            LOG.info("\tMigrating cleartext size {}: Truncating to new ciphertext size: {}", cleartextSize, newEOF);
            ch.truncate(newEOF);
            LOG.info("\tFile truncated");
        } else {
            // last block may contain padding and needs to be re-encrypted:
            long lastChunkIdx = newFullChunks;
            LOG.info("\tMigrating cleartext size {}: Re-encrypting chunk {}. New ciphertext size: {}", cleartextSize, lastChunkIdx, newEOF);
            long beginOfLastChunk = headerSize + lastChunkIdx * ciphertextChunkSize;
            assert beginOfLastChunk < newEOF;
            int lastCleartextChunkLength = (int) (cleartextSize % cleartextChunkSize);
            assert lastCleartextChunkLength < cleartextChunkSize;
            assert lastCleartextChunkLength > 0;
            ch.position(beginOfLastChunk);
            ByteBuffer lastCiphertextChunk = ByteBuffer.allocate(ciphertextChunkSize);
            int read = ch.read(lastCiphertextChunk);
            if (read != -1) {
                lastCiphertextChunk.flip();
                // Decrypt the (padded) old last chunk, keep only the real
                // cleartext bytes, re-encrypt, and write it back in place of the
                // old chunk after truncating away the padding.
                ByteBuffer lastCleartextChunk = cryptor.fileContentCryptor().decryptChunk(lastCiphertextChunk, lastChunkIdx, header, true);
                lastCleartextChunk.position(0).limit(lastCleartextChunkLength);
                assert lastCleartextChunk.remaining() == lastCleartextChunkLength;
                ByteBuffer newLastChunkCiphertext = cryptor.fileContentCryptor().encryptChunk(lastCleartextChunk, lastChunkIdx, header);
                ch.truncate(beginOfLastChunk);
                ch.write(newLastChunkCiphertext);
            } else {
                LOG.error("\tReached EOF at position {}/{}", beginOfLastChunk, newEOF);
                // must exit method before changing header!
                return;
            }
            LOG.info("\tReencrypted last block");
        }
        // Mark as migrated: -1 is the sentinel checked at the top of this method.
        header.setFilesize(-1l);
        ByteBuffer newHeaderBuf = cryptor.fileHeaderCryptor().encryptHeader(header);
        ch.position(0);
        ch.write(newHeaderBuf);
        LOG.info("\tUpdated header");
    }
    LOG.info("Finished migration of {}.", file);
}
Also used : FileChannel(java.nio.channels.FileChannel) ByteBuffer(java.nio.ByteBuffer) FileHeader(org.cryptomator.cryptolib.api.FileHeader)

Example 24 with FileChannel

Use of java.nio.channels.FileChannel in project che by eclipse.

The class IoUtil, method nioCopyFile.

/**
 * Copies {@code source} to {@code target} using NIO channel transfer.
 *
 * @param source the file to read from
 * @param target the file to create and write to
 * @param replaceIfExists whether an existing target may be overwritten
 * @throws IOException if the target exists and must not be replaced, or if the
 *         copy itself fails
 */
private static void nioCopyFile(File source, File target, boolean replaceIfExists) throws IOException {
    // createNewFile is atomic; it returns false when the target is already
    // present, in which case we bail out unless overwriting was requested.
    if (!target.createNewFile() && target.exists() && !replaceIfExists) {
        throw new IOException(String.format("File '%s' already exists. ", target.getAbsolutePath()));
    }
    try (FileInputStream in = new FileInputStream(source);
        FileOutputStream out = new FileOutputStream(target);
        FileChannel from = in.getChannel();
        FileChannel to = out.getChannel()) {
        final long total = from.size();
        // transferFrom may move fewer bytes than requested per call, so keep
        // going until everything has been transferred.
        for (long done = 0L; done < total; ) {
            done += to.transferFrom(from, done, total - done);
        }
    }
}
Also used : FileChannel(java.nio.channels.FileChannel) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream)

Example 25 with FileChannel

Use of java.nio.channels.FileChannel in project druid by druid-io.

The class LongCompressionBenchmarkFileGenerator, method main.

/**
 * Generates benchmark input files for long-column compression: one plain data
 * file per value distribution (enumerated, zipf-low, zipf-high, sequential,
 * uniform), then one compressed copy of each for every combination of
 * compression strategy and long-encoding strategy, printing each compressed
 * file's size in KiB.
 *
 * @param args optional; {@code args[0]} overrides the output directory path
 * @throws IOException if a data or compressed file cannot be written
 * @throws URISyntaxException declared for caller compatibility
 */
public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }
    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.LONG, true, 1, 0d, ImmutableList.<Object>of(0, 1, 2, 3, 4), ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1, 1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1, 1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.LONG, true, 1, 0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeDiscreteUniform("", ValueType.LONG, true, 1, 0d, 0, 1000);
    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));
    File dir = new File(dirPath);
    dir.mkdir();
    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((long) entry.getValue().generateRowValue() + "\n");
            }
        }
    }
    // create compressed files using all combinations of CompressionStrategy and LongEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
                String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString();
                System.out.print(name + ": ");
                File compFile = new File(dir, name);
                compFile.delete();
                File dataFile = new File(dir, entry.getKey());
                TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
                LongSupplierSerializer writer = CompressionFactory.getLongSerializer(iopeon, "long", ByteOrder.nativeOrder(), encoding, compression);
                // The reader is a managed resource so it is closed even when
                // FileChannel.open throws; the old code leaked it in that case.
                try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));
                    FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) {
                    writer.open();
                    String line;
                    while ((line = br.readLine()) != null) {
                        writer.add(Long.parseLong(line));
                    }
                    // Serialize into memory first, then write the consolidated
                    // bytes to the compressed output file in one shot.
                    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    writer.closeAndConsolidate(new ByteSink() {

                        @Override
                        public OutputStream openStream() throws IOException {
                            return baos;
                        }
                    });
                    output.write(ByteBuffer.wrap(baos.toByteArray()));
                } finally {
                    // Runs after the try-with-resources closes output and br.
                    iopeon.close();
                }
                System.out.print(compFile.length() / 1024 + "\n");
            }
        }
    }
}
Also used : HashMap(java.util.HashMap) ByteArrayOutputStream(java.io.ByteArrayOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) CompressedObjectStrategy(io.druid.segment.data.CompressedObjectStrategy) BufferedWriter(java.io.BufferedWriter) ByteSink(com.google.common.io.ByteSink) TmpFileIOPeon(io.druid.segment.data.TmpFileIOPeon) InputStreamReader(java.io.InputStreamReader) FileChannel(java.nio.channels.FileChannel) BenchmarkColumnValueGenerator(io.druid.benchmark.datagen.BenchmarkColumnValueGenerator) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) FileOutputStream(java.io.FileOutputStream) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) CompressionFactory(io.druid.segment.data.CompressionFactory) BenchmarkColumnSchema(io.druid.benchmark.datagen.BenchmarkColumnSchema) File(java.io.File) HashMap(java.util.HashMap) Map(java.util.Map) OutputStreamWriter(java.io.OutputStreamWriter) BufferedWriter(java.io.BufferedWriter) Writer(java.io.Writer) LongSupplierSerializer(io.druid.segment.data.LongSupplierSerializer)

Aggregations

FileChannel (java.nio.channels.FileChannel)629 IOException (java.io.IOException)227 ByteBuffer (java.nio.ByteBuffer)205 File (java.io.File)185 FileInputStream (java.io.FileInputStream)164 FileOutputStream (java.io.FileOutputStream)147 RandomAccessFile (java.io.RandomAccessFile)144 Test (org.junit.Test)95 MappedByteBuffer (java.nio.MappedByteBuffer)78 Path (java.nio.file.Path)37 FileLock (java.nio.channels.FileLock)32 FileNotFoundException (java.io.FileNotFoundException)29 Random (java.util.Random)12 OutputStream (java.io.OutputStream)11 ArrayList (java.util.ArrayList)11 AsynchronousFileChannel (java.nio.channels.AsynchronousFileChannel)10 OverlappingFileLockException (java.nio.channels.OverlappingFileLockException)10 LinkedList (java.util.LinkedList)10 ProjectWorkspace (com.facebook.buck.testutil.integration.ProjectWorkspace)9 BufferedReader (java.io.BufferedReader)9