
Example 46 with FileStatus

Use of org.apache.hadoop.fs.FileStatus in project hadoop by apache.

The class TestWebHdfsFileSystemContract, method testRootDir.

public void testRootDir() throws IOException {
    final Path root = new Path("/");
    final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) fs;
    final URL url = webhdfs.toUrl(GetOpParam.Op.NULL, root);
    WebHdfsFileSystem.LOG.info("null url=" + url);
    Assert.assertTrue(url.toString().contains("v1"));
    //test root permission
    final FileStatus status = fs.getFileStatus(root);
    assertTrue(status != null);
    assertEquals(0777, status.getPermission().toShort());
    //delete root
    assertFalse(fs.delete(root, true));
    //create file using root path 
    try {
        final FSDataOutputStream out = fs.create(root);
        out.write(1);
        out.close();
        fail();
    } catch (IOException e) {
        WebHdfsFileSystem.LOG.info("This is expected.", e);
    }
    //open file using root path 
    try {
        final FSDataInputStream in = fs.open(root);
        in.read();
        fail();
    } catch (IOException e) {
        WebHdfsFileSystem.LOG.info("This is expected.", e);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), IOException (java.io.IOException), URL (java.net.URL)
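
For readers new to the API, here is a minimal standalone sketch of the FileStatus inspection the test performs against the root directory, leaving out the WebHDFS URL and negative-path checks. The class name is made up and the configuration is assumed to point at a reachable cluster; only the FileSystem and FileStatus calls are real API.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RootStatusSketch {
    public static void main(String[] args) throws Exception {
        // assumes fs.defaultFS in the loaded configuration points at the target file system
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path root = new Path("/");
        // getFileStatus never returns null; it throws FileNotFoundException for a missing
        // path, which is why the null check in the test always passes for "/"
        FileStatus status = fs.getFileStatus(root);
        System.out.println("path        = " + status.getPath());
        System.out.println("isDirectory = " + status.isDirectory());
        System.out.println("permission  = " + status.getPermission());
        fs.close();
    }
}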

Example 47 with FileStatus

Use of org.apache.hadoop.fs.FileStatus in project hadoop by apache.

The class TestFSMainOperationsWebHdfs, method testConcat.

@Test
public void testConcat() throws Exception {
    Path[] paths = { new Path("/test/hadoop/file1"), new Path("/test/hadoop/file2"), new Path("/test/hadoop/file3") };
    // create three 1024-byte source files plus the 1024-byte target to concatenate into
    DFSTestUtil.createFile(fSys, paths[0], 1024, (short) 3, 0);
    DFSTestUtil.createFile(fSys, paths[1], 1024, (short) 3, 0);
    DFSTestUtil.createFile(fSys, paths[2], 1024, (short) 3, 0);
    Path catPath = new Path("/test/hadoop/catFile");
    DFSTestUtil.createFile(fSys, catPath, 1024, (short) 3, 0);
    Assert.assertTrue(exists(fSys, catPath));
    // concat appends the sources onto catPath and removes them from the namespace
    fSys.concat(catPath, paths);
    Assert.assertFalse(exists(fSys, paths[0]));
    Assert.assertFalse(exists(fSys, paths[1]));
    Assert.assertFalse(exists(fSys, paths[2]));
    // the merged file's length is the sum of the four 1024-byte files
    FileStatus fileStatus = fSys.getFileStatus(catPath);
    Assert.assertEquals(1024 * 4, fileStatus.getLen());
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), FSMainOperationsBaseTest (org.apache.hadoop.fs.FSMainOperationsBaseTest), Test (org.junit.Test)
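
As a reminder of the semantics this test relies on, below is a minimal sketch of concat() followed by a FileStatus length check. The paths, class name, and helper name are hypothetical; concat() is only supported by file systems such as HDFS and WebHDFS, and the default FileSystem implementation throws UnsupportedOperationException.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ConcatLengthSketch {
    // appends the sources onto 'target' (the sources are removed afterwards) and
    // returns the merged length as reported by FileStatus
    static long concatAndMeasure(FileSystem fs, Path target, Path[] sources) throws Exception {
        fs.concat(target, sources);
        FileStatus merged = fs.getFileStatus(target);
        return merged.getLen();
    }

    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // hypothetical pre-existing files of known sizes
        Path target = new Path("/tmp/catFile");
        Path[] sources = { new Path("/tmp/file1"), new Path("/tmp/file2") };
        System.out.println("merged length = " + concatAndMeasure(fs, target, sources));
        fs.close();
    }
}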

Example 48 with FileStatus

Use of org.apache.hadoop.fs.FileStatus in project hadoop by apache.

The class FSOperations, method toJson.

/**
   * @param fileStatuses list of FileStatus objects
   * @return JSON map suitable for wire transport
   */
@SuppressWarnings({ "unchecked" })
private static Map<String, Object> toJson(FileStatus[] fileStatuses) {
    Map<String, Object> json = new LinkedHashMap<>();
    Map<String, Object> inner = new LinkedHashMap<>();
    JSONArray statuses = new JSONArray();
    for (FileStatus f : fileStatuses) {
        statuses.add(toJsonInner(f, false));
    }
    inner.put(HttpFSFileSystem.FILE_STATUS_JSON, statuses);
    json.put(HttpFSFileSystem.FILE_STATUSES_JSON, inner);
    return json;
}
Also used: FileStatus (org.apache.hadoop.fs.FileStatus), JSONArray (org.json.simple.JSONArray), JSONObject (org.json.simple.JSONObject), LinkedHashMap (java.util.LinkedHashMap)
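
The toJsonInner helper is not shown above, so the sketch below only approximates the kind of per-status map it might build. The key names are illustrative and do not match the HttpFSFileSystem wire constants; only the FileStatus accessors and the json-simple types are real API.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.fs.FileStatus;
import org.json.simple.JSONArray;

public class FileStatusJsonSketch {
    // illustrative per-status map; the keys are invented, not HttpFS constants
    private static Map<String, Object> statusToMap(FileStatus f) {
        Map<String, Object> m = new LinkedHashMap<>();
        m.put("pathSuffix", f.getPath().getName());
        m.put("type", f.isDirectory() ? "DIRECTORY" : "FILE");
        m.put("length", f.getLen());
        m.put("permission", f.getPermission().toString());
        m.put("owner", f.getOwner());
        m.put("group", f.getGroup());
        m.put("modificationTime", f.getModificationTime());
        return m;
    }

    @SuppressWarnings("unchecked")
    static JSONArray toJsonArray(FileStatus[] statuses) {
        JSONArray array = new JSONArray();
        for (FileStatus f : statuses) {
            array.add(statusToMap(f));
        }
        return array;
    }
}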

Example 49 with FileStatus

Use of org.apache.hadoop.fs.FileStatus in project hadoop by apache.

The class BaseTestHttpFSWith, method testTruncate.

private void testTruncate() throws Exception {
    if (!isLocalFS()) {
        final short repl = 3;
        final int blockSize = 1024;
        final int numOfBlocks = 2;
        FileSystem fs = FileSystem.get(getProxiedFSConf());
        fs.mkdirs(getProxiedFSTestDir());
        Path file = new Path(getProxiedFSTestDir(), "foo.txt");
        final byte[] data = FileSystemTestHelper.getFileData(numOfBlocks, blockSize);
        FileSystemTestHelper.createFile(fs, file, data, blockSize, repl);
        final int newLength = blockSize;
        boolean isReady = fs.truncate(file, newLength);
        assertTrue("Recovery is not expected.", isReady);
        FileStatus fileStatus = fs.getFileStatus(file);
        assertEquals(fileStatus.getLen(), newLength);
        AppendTestUtil.checkFullFile(fs, file, newLength, data, file.toString());
        fs.close();
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), FileSystem (org.apache.hadoop.fs.FileSystem)
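
The boolean returned by truncate() is the detail this test hinges on, so here is a minimal sketch of it outside the test harness. The path and class name are hypothetical; truncate() requires Hadoop 2.7 or later and a file system that implements it.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class TruncateSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // hypothetical existing file, longer than newLength
        Path file = new Path("/tmp/foo.txt");
        long newLength = 1024;
        // true means the file is already usable at newLength; false means block
        // recovery on the last block is still in progress
        boolean ready = fs.truncate(file, newLength);
        if (ready) {
            FileStatus status = fs.getFileStatus(file);
            System.out.println("truncated length = " + status.getLen());
        } else {
            System.out.println("truncate pending block recovery");
        }
        fs.close();
    }
}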

Example 50 with FileStatus

Use of org.apache.hadoop.fs.FileStatus in project hadoop by apache.

The class BaseTestHttpFSWith, method testCreate.

private void testCreate(Path path, boolean override) throws Exception {
    FileSystem fs = getHttpFSFileSystem();
    FsPermission permission = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
    OutputStream os = fs.create(new Path(path.toUri().getPath()), permission, override, 1024, (short) 2, 100 * 1024 * 1024, null);
    os.write(1);
    os.close();
    fs.close();
    fs = FileSystem.get(getProxiedFSConf());
    FileStatus status = fs.getFileStatus(path);
    if (!isLocalFS()) {
        assertEquals(status.getReplication(), 2);
        assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
    }
    assertEquals(status.getPermission(), permission);
    InputStream is = fs.open(path);
    assertEquals(is.read(), 1);
    is.close();
    fs.close();
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), InputStream (java.io.InputStream), FileSystem (org.apache.hadoop.fs.FileSystem), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), FsPermission (org.apache.hadoop.fs.permission.FsPermission)
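
For comparison, here is a minimal sketch of the same create-then-verify pattern against a plain FileSystem handle. The path, class name, and printed fields are illustrative; on a local file system the replication and block-size hints are not honored, which is why the test guards those assertions with isLocalFS().

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class CreateAndVerifySketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // hypothetical target path
        Path file = new Path("/tmp/created.bin");
        FsPermission perm = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
        // create(path, permission, overwrite, bufferSize, replication, blockSize, progress)
        FSDataOutputStream out = fs.create(file, perm, true, 4096, (short) 2, 128L * 1024 * 1024, null);
        out.write(1);
        out.close();
        // FileStatus exposes the replication, block size, and permission of the new file
        FileStatus status = fs.getFileStatus(file);
        System.out.println("replication = " + status.getReplication());
        System.out.println("blockSize   = " + status.getBlockSize());
        System.out.println("permission  = " + status.getPermission());
        fs.close();
    }
}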

Aggregations

FileStatus (org.apache.hadoop.fs.FileStatus): 1156
Path (org.apache.hadoop.fs.Path): 910
FileSystem (org.apache.hadoop.fs.FileSystem): 417
Test (org.junit.Test): 372
IOException (java.io.IOException): 296
Configuration (org.apache.hadoop.conf.Configuration): 187
ArrayList (java.util.ArrayList): 175
FileNotFoundException (java.io.FileNotFoundException): 136
LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus): 105
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 86
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 67
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 65
HashMap (java.util.HashMap): 54
File (java.io.File): 41
URI (java.net.URI): 41
PathFilter (org.apache.hadoop.fs.PathFilter): 38
BufferedReader (java.io.BufferedReader): 30
InputStreamReader (java.io.InputStreamReader): 30
BlockLocation (org.apache.hadoop.fs.BlockLocation): 30
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 30