Search in sources :

Example 86 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project cdap by caskdata.

the class MasterServiceMain method createDirectory.

/**
 * Creates the directory at {@code path} (with all missing parents) if it does not
 * already exist, forcing 777 permissions by clearing the umask on the given
 * {@link FileContext}. Recoverable failures (access denied, bad parent, missing
 * parent) are logged and swallowed; any other I/O failure is rethrown unchecked.
 *
 * NOTE(review): the umask change on {@code fileContext} is never restored, so it
 * affects all later operations on this context — confirm that is intended.
 */
private void createDirectory(FileContext fileContext, String path) {
    try {
        org.apache.hadoop.fs.Path fPath = new org.apache.hadoop.fs.Path(path);
        boolean dirExists = checkDirectoryExists(fileContext, fPath);
        if (!dirExists) {
            FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
            // file context does ( permission AND  (NOT of umask) ) and uses that as permission, by default umask is 022,
            // if we want 777 permission, we have to set umask to 000
            fileContext.setUMask(new FsPermission(FsAction.NONE, FsAction.NONE, FsAction.NONE));
            fileContext.mkdir(fPath, permission, true);
        }
    } catch (FileAlreadyExistsException e) {
    // should not happen: we only attempt creation when the directory does NOT
    // already exist (checked above); a concurrent creator could still race us,
    // in which case the directory now exists and there is nothing to do
    } catch (AccessControlException | ParentNotDirectoryException | FileNotFoundException e) {
        // best-effort: these are treated as non-fatal, so just log the exception
        LOG.error("Exception while trying to create directory at {}", path, e);
    } catch (IOException e) {
        // unexpected I/O failure — propagate as an unchecked exception
        throw Throwables.propagate(e);
    }
}
Also used : Path(java.nio.file.Path) FileAlreadyExistsException(org.apache.hadoop.fs.FileAlreadyExistsException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) ParentNotDirectoryException(org.apache.hadoop.fs.ParentNotDirectoryException) FsPermission(org.apache.hadoop.fs.permission.FsPermission)

Example 87 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project drill by axbaretto.

the class DrillSqlWorker method getQueryPlan.

/**
 * Converts sql query string into query physical plan.
 *
 * @param context query context
 * @param sql sql query
 * @param textPlan text plan
 * @return query physical plan
 */
/**
 * Parses {@code sql}, picks the handler matching the statement kind, and asks it
 * to produce the physical plan. Handler failures are remapped to user-facing
 * {@link UserException}s or a {@link QueryInputException}.
 *
 * @param context query context used for parsing, execution controls and options
 * @param sql sql query text to plan
 * @param textPlan out-parameter receiving the textual plan (used by EXPLAIN,
 *                 CTAS-style statements and the default handler)
 * @return query physical plan produced by the selected handler
 * @throws ForemanSetupException injected by the "sql-parsing" execution control
 */
private static PhysicalPlan getQueryPlan(QueryContext context, String sql, Pointer<String> textPlan) throws ForemanSetupException {
    final SqlConverter parser = new SqlConverter(context);
    // test hook: lets controls fault-inject a setup failure during parsing
    injector.injectChecked(context.getExecutionControls(), "sql-parsing", ForemanSetupException.class);
    final SqlNode sqlNode = parser.parse(sql);
    final AbstractSqlHandler handler;
    final SqlHandlerConfig config = new SqlHandlerConfig(context, parser);
    // NOTE: the cases below deliberately omit `break` when the instanceof guard
    // fails, falling through case-by-case until `default` picks the handler.
    switch(sqlNode.getKind()) {
        case EXPLAIN:
            handler = new ExplainHandler(config, textPlan);
            break;
        case SET_OPTION:
            handler = new SetOptionHandler(context);
            break;
        case DESCRIBE_TABLE:
            if (sqlNode instanceof DrillSqlDescribeTable) {
                handler = new DescribeTableHandler(config);
                break;
            }
        // falls through when the node is not a DrillSqlDescribeTable
        case DESCRIBE_SCHEMA:
            if (sqlNode instanceof SqlDescribeSchema) {
                handler = new DescribeSchemaHandler(config);
                break;
            }
        // falls through when the node is not a SqlDescribeSchema
        case OTHER:
            if (sqlNode instanceof SqlCreateTable) {
                handler = ((DrillSqlCall) sqlNode).getSqlHandler(config, textPlan);
                break;
            }
            if (sqlNode instanceof DrillSqlCall) {
                handler = ((DrillSqlCall) sqlNode).getSqlHandler(config);
                break;
            }
        // fallthrough
        default:
            handler = new DefaultSqlHandler(config, textPlan);
    }
    try {
        return handler.getPlan(sqlNode);
    } catch (ValidationException e) {
        // prefer the root-cause message when present; it is usually more specific
        String errorMessage = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
        throw UserException.validationError(e).message(errorMessage).build(logger);
    } catch (AccessControlException e) {
        throw UserException.permissionError(e).build(logger);
    } catch (SqlUnsupportedException e) {
        throw UserException.unsupportedError(e).build(logger);
    } catch (IOException | RelConversionException e) {
        throw new QueryInputException("Failure handling SQL.", e);
    }
}
Also used : DefaultSqlHandler(org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler) ExplainHandler(org.apache.drill.exec.planner.sql.handlers.ExplainHandler) ValidationException(org.apache.calcite.tools.ValidationException) DescribeTableHandler(org.apache.drill.exec.planner.sql.handlers.DescribeTableHandler) SetOptionHandler(org.apache.drill.exec.planner.sql.handlers.SetOptionHandler) AccessControlException(org.apache.hadoop.security.AccessControlException) SqlHandlerConfig(org.apache.drill.exec.planner.sql.handlers.SqlHandlerConfig) IOException(java.io.IOException) SqlDescribeSchema(org.apache.calcite.sql.SqlDescribeSchema) DescribeSchemaHandler(org.apache.drill.exec.planner.sql.handlers.DescribeSchemaHandler) SqlCreateTable(org.apache.drill.exec.planner.sql.parser.SqlCreateTable) RelConversionException(org.apache.calcite.tools.RelConversionException) DrillSqlDescribeTable(org.apache.drill.exec.planner.sql.parser.DrillSqlDescribeTable) AbstractSqlHandler(org.apache.drill.exec.planner.sql.handlers.AbstractSqlHandler) SqlUnsupportedException(org.apache.drill.exec.work.foreman.SqlUnsupportedException) DrillSqlCall(org.apache.drill.exec.planner.sql.parser.DrillSqlCall) SqlNode(org.apache.calcite.sql.SqlNode)

Example 88 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.

the class CrailHDFS method getFileStatus.

@Override
public FileStatus getFileStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    CrailNode directFile = null;
    try {
        directFile = dfs.lookup(path.toUri().getRawPath()).get();
    } catch (Exception e) {
        throw new IOException(e);
    }
    if (directFile == null) {
        throw new FileNotFoundException("filename " + path);
    }
    FsPermission permission = FsPermission.getFileDefault();
    if (directFile.getType().isDirectory()) {
        permission = FsPermission.getDirDefault();
    }
    FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, path.makeQualified(this.getUri(), this.workingDir));
    return status;
}
Also used : FileStatus(org.apache.hadoop.fs.FileStatus) CrailNode(org.apache.crail.CrailNode) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) URISyntaxException(java.net.URISyntaxException) UnresolvedLinkException(org.apache.hadoop.fs.UnresolvedLinkException) ParentNotDirectoryException(org.apache.hadoop.fs.ParentNotDirectoryException) FileAlreadyExistsException(org.apache.hadoop.fs.FileAlreadyExistsException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) UnsupportedFileSystemException(org.apache.hadoop.fs.UnsupportedFileSystemException)

Example 89 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.

the class CrailHDFS method open.

@Override
public FSDataInputStream open(Path path, int bufferSize) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    CrailFile fileInfo = null;
    try {
        fileInfo = dfs.lookup(path.toUri().getRawPath()).get().asFile();
    } catch (Exception e) {
        throw new IOException(e);
    }
    CrailBufferedInputStream inputStream = null;
    if (fileInfo != null) {
        try {
            inputStream = fileInfo.getBufferedInputStream(fileInfo.getCapacity());
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
    if (inputStream != null) {
        return new CrailHDFSInputStream(inputStream);
    } else {
        throw new IOException("Failed to open file, path " + path.toString());
    }
}
Also used : CrailFile(org.apache.crail.CrailFile) IOException(java.io.IOException) CrailBufferedInputStream(org.apache.crail.CrailBufferedInputStream) URISyntaxException(java.net.URISyntaxException) UnresolvedLinkException(org.apache.hadoop.fs.UnresolvedLinkException) ParentNotDirectoryException(org.apache.hadoop.fs.ParentNotDirectoryException) FileAlreadyExistsException(org.apache.hadoop.fs.FileAlreadyExistsException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) UnsupportedFileSystemException(org.apache.hadoop.fs.UnsupportedFileSystemException)

Example 90 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.

the class CrailHDFS method listStatus.

@Override
public FileStatus[] listStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    try {
        CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
        Iterator<String> iter = node.asContainer().listEntries();
        ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
        while (iter.hasNext()) {
            String filepath = iter.next();
            CrailNode directFile = dfs.lookup(filepath).get();
            if (directFile != null) {
                FsPermission permission = FsPermission.getFileDefault();
                if (directFile.getType().isDirectory()) {
                    permission = FsPermission.getDirDefault();
                }
                FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, new Path(filepath).makeQualified(this.getUri(), workingDir));
                statusList.add(status);
            }
        }
        FileStatus[] list = new FileStatus[statusList.size()];
        statusList.toArray(list);
        return list;
    } catch (Exception e) {
        throw new FileNotFoundException(path.toUri().getRawPath());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) CrailNode(org.apache.crail.CrailNode) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) URISyntaxException(java.net.URISyntaxException) UnresolvedLinkException(org.apache.hadoop.fs.UnresolvedLinkException) ParentNotDirectoryException(org.apache.hadoop.fs.ParentNotDirectoryException) FileAlreadyExistsException(org.apache.hadoop.fs.FileAlreadyExistsException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) UnsupportedFileSystemException(org.apache.hadoop.fs.UnsupportedFileSystemException)

Aggregations

AccessControlException (org.apache.hadoop.security.AccessControlException)165 Path (org.apache.hadoop.fs.Path)72 IOException (java.io.IOException)69 Test (org.junit.Test)60 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)44 FsPermission (org.apache.hadoop.fs.permission.FsPermission)41 SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)35 FileSystem (org.apache.hadoop.fs.FileSystem)33 Configuration (org.apache.hadoop.conf.Configuration)25 HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus)21 FileNotFoundException (java.io.FileNotFoundException)19 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)14 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)13 UnsupportedEncodingException (java.io.UnsupportedEncodingException)11 HashMap (java.util.HashMap)10 FileStatus (org.apache.hadoop.fs.FileStatus)10 ClientResponse (com.sun.jersey.api.client.ClientResponse)9 PrivilegedAction (java.security.PrivilegedAction)9 ParentNotDirectoryException (org.apache.hadoop.fs.ParentNotDirectoryException)9 RESTResponse (org.apache.ranger.admin.client.datatype.RESTResponse)9