
Example 36 with FileContext

Use of org.apache.hadoop.fs.FileContext in project apex-malhar by apache.

The class FileSystemWALTest, method testFinalizeWithDelete.

@Test
public void testFinalizeWithDelete() throws IOException {
    // Cap part files at 2 KB so each window of two 1 KB records fills one part file.
    testMeta.fsWAL.setMaxLength(2 * 1024);
    testMeta.fsWAL.setup();
    FileSystemWAL.FileSystemWALWriter fsWALWriter = testMeta.fsWAL.getWriter();
    // Write three windows of two 1 KB records, checkpointing after each window.
    write1KRecords(fsWALWriter, 2);
    testMeta.fsWAL.beforeCheckpoint(0);
    write1KRecords(fsWALWriter, 2);
    testMeta.fsWAL.beforeCheckpoint(1);
    write1KRecords(fsWALWriter, 2);
    testMeta.fsWAL.beforeCheckpoint(2);
    FileSystemWAL.FileSystemWALReader fsWALReader = testMeta.fsWAL.getReader();
    assertNumTuplesRead(fsWALReader, 6);
    // Window 0 is committed, so truncating up to part 2 removes part 0 immediately.
    testMeta.fsWAL.committed(0);
    fsWALWriter.delete(new FileSystemWAL.FileSystemWALPointer(2, 0));
    FileContext fileContext = FileContextUtils.getFileContext(testMeta.fsWAL.getFilePath());
    Assert.assertFalse("part 0 exists", fileContext.util().exists(new Path(testMeta.fsWAL.getPartFilePath(0))));
    // Part 1 is removed once its window is committed as well.
    testMeta.fsWAL.committed(1);
    Assert.assertFalse("part 1 exists", fileContext.util().exists(new Path(testMeta.fsWAL.getPartFilePath(1))));
    // Only the two records written after the truncation point remain readable.
    fsWALReader.seek(fsWALReader.getStartPointer());
    assertNumTuplesRead(fsWALReader, 2);
}
Also used : Path(org.apache.hadoop.fs.Path) FileContext(org.apache.hadoop.fs.FileContext) Test(org.junit.Test)
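
FileContextUtils here is apex-malhar's own helper for resolving a FileContext from a path string. As a minimal sketch of the stock Hadoop calls the assertions rely on, assuming a hypothetical part-file path:

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class FileContextExistsSketch {
    public static void main(String[] args) throws Exception {
        // Local-FS FileContext; the test resolves one for the WAL's file system instead.
        FileContext fc = FileContext.getLocalFSFileContext();
        Path part = new Path("/tmp/wal/part-0"); // hypothetical part-file path
        // fc.util().exists(...) is the same existence check the test asserts on.
        if (fc.util().exists(part)) {
            fc.delete(part, false); // non-recursive delete of a single file
        }
    }
}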

Example 37 with FileContext

Use of org.apache.hadoop.fs.FileContext in project elasticsearch by elastic.

The class HdfsRepository, method doStart.

@Override
protected void doStart() {
    String uriSetting = getMetadata().settings().get("uri");
    if (Strings.hasText(uriSetting) == false) {
        throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore");
    }
    URI uri = URI.create(uriSetting);
    if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) {
        throw new IllegalArgumentException(String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting));
    }
    if (Strings.hasLength(uri.getPath()) && uri.getPath().equals("/") == false) {
        throw new IllegalArgumentException(String.format(Locale.ROOT, "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting));
    }
    String pathSetting = getMetadata().settings().get("path");
    if (pathSetting == null) {
        throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore");
    }
    int bufferSize = getMetadata().settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt();
    try {
        // initialize our filecontext
        SpecialPermission.check();
        FileContext fileContext = AccessController.doPrivileged((PrivilegedAction<FileContext>) () -> createContext(uri, getMetadata().settings()));
        blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize);
        logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), pathSetting);
    } catch (IOException e) {
        throw new ElasticsearchGenerationException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e);
    }
    super.doStart();
}
Also used : IOException(java.io.IOException) URI(java.net.URI) FileContext(org.apache.hadoop.fs.FileContext) ElasticsearchGenerationException(org.elasticsearch.ElasticsearchGenerationException)
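
For contrast, a minimal sketch of resolving a FileContext for an HDFS URI directly, without the repository's Subject and privileged-action plumbing; the namenode address below is a placeholder:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;

public class HdfsFileContextSketch {
    public static void main(String[] args) throws Exception {
        Configuration cfg = new Configuration();
        // FileContext whose default file system is the given HDFS URI.
        FileContext fc = FileContext.getFileContext(URI.create("hdfs://namenode:8020"), cfg);
        System.out.println(fc.getDefaultFileSystem().getUri());
    }
}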

Example 38 with FileContext

Use of org.apache.hadoop.fs.FileContext in project elasticsearch by elastic.

The class HdfsRepository, method createContext.

// create hadoop filecontext
@SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
private static FileContext createContext(URI uri, Settings repositorySettings) {
    Configuration cfg = new Configuration(repositorySettings.getAsBoolean("load_defaults", true));
    cfg.setClassLoader(HdfsRepository.class.getClassLoader());
    cfg.reloadConfiguration();
    Map<String, String> map = repositorySettings.getByPrefix("conf.").getAsMap();
    for (Entry<String, String> entry : map.entrySet()) {
        cfg.set(entry.getKey(), entry.getValue());
    }
    // create a hadoop user. if we want some auth, it must be done differently anyway, and tested.
    Subject subject;
    try {
        Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
        Constructor<?> ctor = clazz.getConstructor(String.class);
        ctor.setAccessible(true);
        Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
        subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(), Collections.emptySet());
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
    // disable FS cache
    cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
    // create the filecontext with our user
    return Subject.doAs(subject, (PrivilegedAction<FileContext>) () -> {
        try {
            AbstractFileSystem fs = AbstractFileSystem.get(uri, cfg);
            return FileContext.getFileContext(fs, cfg);
        } catch (UnsupportedFileSystemException e) {
            throw new RuntimeException(e);
        }
    });
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Subject(javax.security.auth.Subject) AbstractFileSystem(org.apache.hadoop.fs.AbstractFileSystem) UnsupportedFileSystemException(org.apache.hadoop.fs.UnsupportedFileSystemException) Principal(java.security.Principal) FileContext(org.apache.hadoop.fs.FileContext) SuppressForbidden(org.elasticsearch.common.SuppressForbidden)
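
A note on the design choice above: the User principal is built reflectively, presumably because the class is not part of Hadoop's public API, and running the FileContext creation under Subject.doAs lets the plugin avoid Hadoop's full UserGroupInformation login machinery, which, as the comment says, real authentication would need to handle differently anyway. Disabling the FS cache presumably keeps the per-repository Configuration from being shared through globally cached file-system instances.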

Example 39 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

The class TestEntityGroupFSTimelineStore, method tearDownClass.

@AfterClass
public static void tearDownClass() throws Exception {
    hdfsCluster.shutdown();
    FileContext fileContext = FileContext.getLocalFSFileContext();
    fileContext.delete(new Path(config.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH)), true);
}
Also used : Path(org.apache.hadoop.fs.Path) FileContext(org.apache.hadoop.fs.FileContext) AfterClass(org.junit.AfterClass)

Example 40 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

The class TestFSDownload, method deleteTestDir.

@AfterClass
public static void deleteTestDir() throws IOException {
    FileContext fs = FileContext.getLocalFSFileContext();
    fs.delete(new Path("target", TestFSDownload.class.getSimpleName()), true);
}
Also used : Path(org.apache.hadoop.fs.Path) FileContext(org.apache.hadoop.fs.FileContext) AfterClass(org.junit.AfterClass)
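
Examples 39 and 40 share the same teardown pattern: a local FileContext plus a recursive delete of the test's scratch directory. A self-contained sketch, with a hypothetical directory name:

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class TestDirCleanupSketch {
    public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        Path scratch = new Path("target", "TestDirCleanupSketch"); // hypothetical scratch dir
        if (fc.util().exists(scratch)) {
            // true = recursive, matching both @AfterClass methods above
            fc.delete(scratch, true);
        }
    }
}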

Aggregations

FileContext (org.apache.hadoop.fs.FileContext): 84
Path (org.apache.hadoop.fs.Path): 71
Test (org.junit.Test): 34
Configuration (org.apache.hadoop.conf.Configuration): 33
IOException (java.io.IOException): 29
File (java.io.File): 16
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 14
FileStatus (org.apache.hadoop.fs.FileStatus): 13
HashMap (java.util.HashMap): 12
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 10
ArrayList (java.util.ArrayList): 9
FileSystem (org.apache.hadoop.fs.FileSystem): 8
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 8
ExecutorService (java.util.concurrent.ExecutorService): 7
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 7
URISyntaxException (java.net.URISyntaxException): 6
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 6
ExecutionException (java.util.concurrent.ExecutionException): 6
Future (java.util.concurrent.Future): 6
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 6