Search in sources:

Example 96 with DataOutputStream

Use of java.io.DataOutputStream in the Apache Hadoop project.

From the class TestDataTransferProtocol, method testPacketHeader.

@Test
public void testPacketHeader() throws IOException {
    // Build a header with known field values:
    // packet size = 4, offset in block = 1024, sequence number = 100,
    // lastPacketInBlock = false, chunk length = 4096, sync = false.
    PacketHeader original = new PacketHeader(
        4,      // size of packet
        1024,   // offset in block
        100,    // sequence number
        false,  // last packet in block
        4096,   // chunk length
        false);

    // Serialize the header once and reuse the bytes for both read paths.
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    original.write(new DataOutputStream(serialized));
    byte[] bytes = serialized.toByteArray();

    // Round-trip via the DataInput path and verify equality.
    PacketHeader fromStream = new PacketHeader();
    fromStream.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
    assertEquals(original, fromStream);

    // Round-trip via the ByteBuffer path and verify equality.
    PacketHeader fromBuffer = new PacketHeader();
    fromBuffer.readFields(ByteBuffer.wrap(bytes));
    assertEquals(original, fromBuffer);

    // sanityCheck compares against the expected sequence number (100),
    // so 99 passes and 100 fails — same expectations as the original.
    assertTrue(original.sanityCheck(99));
    assertFalse(original.sanityCheck(100));
}
Also used : ByteArrayInputStream(java.io.ByteArrayInputStream) DataOutputStream(java.io.DataOutputStream) PacketHeader(org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) Test(org.junit.Test)

Example 97 with DataOutputStream

Use of java.io.DataOutputStream in the Apache Hadoop project.

From the class TestDFSOutputStream, method testCongestionBackoff.

@Test
public void testCongestionBackoff() throws IOException {
    // Mock a running DFSClient backed by a real tracer.
    DfsClientConf conf = mock(DfsClientConf.class);
    DFSClient client = mock(DFSClient.class);
    when(client.getConf()).thenReturn(conf);
    when(client.getTracer()).thenReturn(FsTracer.get(new Configuration()));
    client.clientRunning = true;

    DataStreamer streamer = new DataStreamer(mock(HdfsFileStatus.class),
        mock(ExtendedBlock.class), client, "foo", null, null, null, null,
        null, null);

    // Force the block stream to fail on flush while the streamer is in
    // the PIPELINE_CLOSE stage, driving it down its error path.
    DataOutputStream failingStream = mock(DataOutputStream.class);
    doThrow(new IOException()).when(failingStream).flush();
    Whitebox.setInternalState(streamer, "blockStream", failingStream);
    Whitebox.setInternalState(streamer, "stage",
        BlockConstructionStage.PIPELINE_CLOSE);

    @SuppressWarnings("unchecked")
    LinkedList<DFSPacket> dataQueue = (LinkedList<DFSPacket>)
        Whitebox.getInternalState(streamer, "dataQueue");
    @SuppressWarnings("unchecked")
    ArrayList<DatanodeInfo> congestedNodes = (ArrayList<DatanodeInfo>)
        Whitebox.getInternalState(streamer, "congestedNodes");

    // Pre-populate one congested node and enqueue a single packet.
    congestedNodes.add(mock(DatanodeInfo.class));
    DFSPacket packet = mock(DFSPacket.class);
    when(packet.getTraceParents()).thenReturn(new SpanId[] {});
    dataQueue.add(packet);

    streamer.run();

    // The run should have cleared the congested-node list.
    Assert.assertTrue(congestedNodes.isEmpty());
}
Also used : DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) Configuration(org.apache.hadoop.conf.Configuration) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) DataOutputStream(java.io.DataOutputStream) ExtendedBlock(org.apache.hadoop.hdfs.protocol.ExtendedBlock) ArrayList(java.util.ArrayList) IOException(java.io.IOException) LinkedList(java.util.LinkedList) DfsClientConf(org.apache.hadoop.hdfs.client.impl.DfsClientConf) HdfsFileStatus(org.apache.hadoop.hdfs.protocol.HdfsFileStatus) Test(org.junit.Test)

Example 98 with DataOutputStream

Use of java.io.DataOutputStream in the Apache Hadoop project.

From the class TestDFSRemove, method createFile.

/**
 * Creates a file at {@code f} containing the bytes of the string
 * "something".
 *
 * @param fs file system on which to create the file
 * @param f  path of the file to create
 * @throws IOException if the file cannot be created or written
 */
static void createFile(FileSystem fs, Path f) throws IOException {
    // try-with-resources closes the stream even if writeBytes throws;
    // the original leaked the stream (and its lease) on a failed write.
    try (DataOutputStream a_out = fs.create(f)) {
        a_out.writeBytes("something");
    }
}
Also used : DataOutputStream(java.io.DataOutputStream)

Example 99 with DataOutputStream

Use of java.io.DataOutputStream in the Apache Hadoop project.

From the class TestDFSRename, method testRename.

/**
 * Exercises FileSystem#rename edge cases against a 2-datanode
 * MiniDFSCluster: lease behavior when a sibling file is renamed while
 * another file is open for write, rename to a non-existent destination,
 * rename into a descendant of the source, prefix-named destinations,
 * and renaming a path onto itself.
 *
 * NOTE(review): relies on `dir`, `createFile`, `countLease`, and `list`
 * defined elsewhere in this class — presumably test fixtures/helpers.
 */
@Test
public void testRename() throws Exception {
    Configuration conf = new HdfsConfiguration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    try {
        FileSystem fs = cluster.getFileSystem();
        assertTrue(fs.mkdirs(dir));
        {
            // Lease handling: renaming a closed file (a -> b) must not
            // disturb the lease held on the still-open file aa.
            //test lease
            Path a = new Path(dir, "a");
            Path aa = new Path(dir, "aa");
            Path b = new Path(dir, "b");
            createFile(fs, a);
            // a was created and closed, so no lease is outstanding
            //should not have any lease
            assertEquals(0, countLease(cluster));
            DataOutputStream aa_out = fs.create(aa);
            aa_out.writeBytes("something");
            // aa is open for write, so exactly one lease exists
            //should have 1 lease
            assertEquals(1, countLease(cluster));
            list(fs, "rename0");
            fs.rename(a, b);
            list(fs, "rename1");
            // the open stream must still be writable after the rename
            aa_out.writeBytes(" more");
            aa_out.close();
            list(fs, "rename2");
            // closing aa releases the last lease
            //should not have any lease
            assertEquals(0, countLease(cluster));
        }
        {
            // test non-existent destination
            Path dstPath = new Path("/c/d");
            assertFalse(fs.exists(dstPath));
            assertFalse(fs.rename(dir, dstPath));
        }
        {
            // dst cannot be a file or directory under src
            // test rename /a/b/foo to /a/b/c
            Path src = new Path("/a/b");
            Path dst = new Path("/a/b/c");
            createFile(fs, new Path(src, "foo"));
            // dst cannot be a file under src
            assertFalse(fs.rename(src, dst));
            // dst cannot be a directory under src
            assertFalse(fs.rename(src.getParent(), dst.getParent()));
        }
        {
            // dst can start with src, if it is not a directory or file under src
            // test rename /test /testfile
            Path src = new Path("/testPrefix");
            Path dst = new Path("/testPrefixfile");
            createFile(fs, src);
            assertTrue(fs.rename(src, dst));
        }
        {
            // dst should not be same as src test rename /a/b/c to /a/b/c
            Path src = new Path("/a/b/c");
            createFile(fs, src);
            // renaming a path onto itself is expected to succeed (no-op)
            assertTrue(fs.rename(src, src));
            // /a/b/ normalizes to /a/b, which already exists as a non-empty
            // directory, so this rename is expected to fail
            assertFalse(fs.rename(new Path("/a/b"), new Path("/a/b/")));
            // trailing slash on the destination normalizes to src itself
            assertTrue(fs.rename(src, new Path("/a/b/c/")));
        }
        fs.delete(dir, true);
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) DataOutputStream(java.io.DataOutputStream) FileSystem(org.apache.hadoop.fs.FileSystem) Test(org.junit.Test)

Example 100 with DataOutputStream

Use of java.io.DataOutputStream in the Apache Hadoop project.

From the class TextOutputFormat, method getRecordWriter.

/**
 * Builds a line-oriented record writer over the task's default work
 * file, wrapping the output stream in the configured compression codec
 * when output compression is enabled.
 *
 * @param job the task attempt context supplying the configuration
 * @return a LineRecordWriter over the (possibly compressed) output file
 * @throws IOException if the output file cannot be created
 */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {
    Configuration conf = job.getConfiguration();
    boolean compress = getCompressOutput(job);
    // Key/value separator defaults to a tab character.
    String separator = conf.get(SEPERATOR, "\t");

    CompressionCodec codec = null;
    String extension = "";
    if (compress) {
        // Resolve the configured codec (gzip by default) and use its
        // canonical file extension on the output file name.
        Class<? extends CompressionCodec> codecClass =
            getOutputCompressorClass(job, GzipCodec.class);
        codec = ReflectionUtils.newInstance(codecClass, conf);
        extension = codec.getDefaultExtension();
    }

    Path file = getDefaultWorkFile(job, extension);
    FSDataOutputStream rawOut = file.getFileSystem(conf).create(file, false);

    if (!compress) {
        return new LineRecordWriter<>(rawOut, separator);
    }
    return new LineRecordWriter<>(
        new DataOutputStream(codec.createOutputStream(rawOut)), separator);
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) DataOutputStream(java.io.DataOutputStream) FileSystem(org.apache.hadoop.fs.FileSystem) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream)

Aggregations

DataOutputStream (java.io.DataOutputStream)2968 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1314 IOException (java.io.IOException)1024 Test (org.junit.Test)633 DataInputStream (java.io.DataInputStream)615 FileOutputStream (java.io.FileOutputStream)427 ByteArrayInputStream (java.io.ByteArrayInputStream)411 File (java.io.File)281 BufferedOutputStream (java.io.BufferedOutputStream)228 UnitTest (org.apache.geode.test.junit.categories.UnitTest)172 URL (java.net.URL)149 InputStreamReader (java.io.InputStreamReader)146 BufferedReader (java.io.BufferedReader)142 Path (org.apache.hadoop.fs.Path)137 DataInput (java.io.DataInput)124 ArrayList (java.util.ArrayList)122 HttpURLConnection (java.net.HttpURLConnection)120 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)117 FileInputStream (java.io.FileInputStream)107 InputStream (java.io.InputStream)107