Use of org.apache.hadoop.security.AccessControlException in project cdap by caskdata.
From the class MasterServiceMain, method createDirectory:
private void createDirectory(FileContext fileContext, String path) {
  try {
    org.apache.hadoop.fs.Path fPath = new org.apache.hadoop.fs.Path(path);
    boolean dirExists = checkDirectoryExists(fileContext, fPath);
    if (!dirExists) {
      FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
      // FileContext applies (permission AND (NOT umask)) as the effective permission; the default
      // umask is 022, so to end up with 777 permissions we have to set the umask to 000 first.
      fileContext.setUMask(new FsPermission(FsAction.NONE, FsAction.NONE, FsAction.NONE));
      fileContext.mkdir(fPath, permission, true);
    }
  } catch (FileAlreadyExistsException e) {
    // should not happen, since we only create the directory when it does not already exist
  } catch (AccessControlException | ParentNotDirectoryException | FileNotFoundException e) {
    // just log the exception
    LOG.error("Exception while trying to create directory at {}", path, e);
  } catch (IOException e) {
    throw Throwables.propagate(e);
  }
}
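The umask arithmetic in the comment above is easy to get wrong, so here is a minimal standalone sketch of it, assuming a default Hadoop FileContext; the /tmp paths are placeholders, not part of the original code.

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext();
    // With the default umask of 022, asking for 777 yields 755: 0777 & ~0022 == 0755.
    fc.mkdir(new Path("/tmp/umask-default"), new FsPermission((short) 0777), true);
    // Clearing the umask first lets the requested 777 survive intact.
    fc.setUMask(new FsPermission((short) 0000));
    fc.mkdir(new Path("/tmp/umask-cleared"), new FsPermission((short) 0777), true);
  }
}

Note that createDirectory above changes the umask on the shared FileContext without restoring it, which is worth keeping in mind if the same FileContext is reused elsewhere.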
Use of org.apache.hadoop.security.AccessControlException in project drill by axbaretto.
From the class DrillSqlWorker, method getQueryPlan:
/**
 * Converts a SQL query string into a physical query plan.
 *
 * @param context query context
 * @param sql SQL query text
 * @param textPlan holder that receives the textual plan
 * @return query physical plan
 */
private static PhysicalPlan getQueryPlan(QueryContext context, String sql, Pointer<String> textPlan) throws ForemanSetupException {
  final SqlConverter parser = new SqlConverter(context);
  injector.injectChecked(context.getExecutionControls(), "sql-parsing", ForemanSetupException.class);
  final SqlNode sqlNode = parser.parse(sql);
  final AbstractSqlHandler handler;
  final SqlHandlerConfig config = new SqlHandlerConfig(context, parser);
  switch (sqlNode.getKind()) {
    case EXPLAIN:
      handler = new ExplainHandler(config, textPlan);
      break;
    case SET_OPTION:
      handler = new SetOptionHandler(context);
      break;
    case DESCRIBE_TABLE:
      if (sqlNode instanceof DrillSqlDescribeTable) {
        handler = new DescribeTableHandler(config);
        break;
      }
      // falls through when the instanceof check fails
    case DESCRIBE_SCHEMA:
      if (sqlNode instanceof SqlDescribeSchema) {
        handler = new DescribeSchemaHandler(config);
        break;
      }
      // falls through when the instanceof check fails
    case OTHER:
      if (sqlNode instanceof SqlCreateTable) {
        handler = ((DrillSqlCall) sqlNode).getSqlHandler(config, textPlan);
        break;
      }
      if (sqlNode instanceof DrillSqlCall) {
        handler = ((DrillSqlCall) sqlNode).getSqlHandler(config);
        break;
      }
      // fallthrough
    default:
      handler = new DefaultSqlHandler(config, textPlan);
  }
  try {
    return handler.getPlan(sqlNode);
  } catch (ValidationException e) {
    String errorMessage = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
    throw UserException.validationError(e).message(errorMessage).build(logger);
  } catch (AccessControlException e) {
    throw UserException.permissionError(e).build(logger);
  } catch (SqlUnsupportedException e) {
    throw UserException.unsupportedError(e).build(logger);
  } catch (IOException | RelConversionException e) {
    throw new QueryInputException("Failure handling SQL.", e);
  }
}
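The catch clauses all funnel into Drill's UserException builder, which turns internal exceptions into client-facing errors. Below is a minimal sketch of the AccessControlException branch in isolation; checkReadAccess is a hypothetical helper, while UserException.permissionError and build are the real Drill API used above.

import org.apache.drill.common.exceptions.UserException;
import org.apache.hadoop.security.AccessControlException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PermissionTranslation {
  private static final Logger logger = LoggerFactory.getLogger(PermissionTranslation.class);

  // Hypothetical helper; a real implementation would consult the storage plugin.
  static void checkReadAccess(String workspace) throws AccessControlException {
    throw new AccessControlException("Not authorized to read " + workspace);
  }

  public static void main(String[] args) {
    try {
      checkReadAccess("dfs.restricted");
    } catch (AccessControlException e) {
      // Same translation as the catch block in getQueryPlan: the permission
      // failure is logged and rethrown as a client-facing UserException.
      throw UserException.permissionError(e).build(logger);
    }
  }
}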
Use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.
From the class CrailHDFS, method getFileStatus:
@Override
public FileStatus getFileStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
  CrailNode directFile = null;
  try {
    directFile = dfs.lookup(path.toUri().getRawPath()).get();
  } catch (Exception e) {
    throw new IOException(e);
  }
  if (directFile == null) {
    throw new FileNotFoundException("filename " + path);
  }
  FsPermission permission = FsPermission.getFileDefault();
  if (directFile.getType().isDirectory()) {
    permission = FsPermission.getDirDefault();
  }
  FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(),
      CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE,
      directFile.getModificationTime(), directFile.getModificationTime(), permission,
      CrailConstants.USER, CrailConstants.USER,
      path.makeQualified(this.getUri(), this.workingDir));
  return status;
}
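CrailHDFS is an AbstractFileSystem implementation, so it is normally reached through Hadoop's FileContext rather than called directly. A hedged usage sketch follows; the crail:// URI, port, and path are placeholders, and it assumes the fs.AbstractFileSystem.crail.impl binding points at CrailHDFS in the configuration.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

public class StatDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // FileContext dispatches getFileStatus to the CrailHDFS implementation above.
    FileContext fc = FileContext.getFileContext(URI.create("crail://namenode:9060/"), conf);
    FileStatus status = fc.getFileStatus(new Path("/tmp/data"));
    System.out.printf("len=%d dir=%b perm=%s%n",
        status.getLen(), status.isDirectory(), status.getPermission());
  }
}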
Use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.
From the class CrailHDFS, method open:
@Override
public FSDataInputStream open(Path path, int bufferSize) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
  CrailFile fileInfo = null;
  try {
    fileInfo = dfs.lookup(path.toUri().getRawPath()).get().asFile();
  } catch (Exception e) {
    throw new IOException(e);
  }
  CrailBufferedInputStream inputStream = null;
  if (fileInfo != null) {
    try {
      inputStream = fileInfo.getBufferedInputStream(fileInfo.getCapacity());
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
  if (inputStream != null) {
    return new CrailHDFSInputStream(inputStream);
  } else {
    throw new IOException("Failed to open file, path " + path.toString());
  }
}
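A minimal read sketch against the same FileContext setup as in the previous example; the URI and path remain placeholders.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class ReadDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(URI.create("crail://namenode:9060/"), new Configuration());
    // fc.open dispatches to CrailHDFS.open, which wraps a CrailBufferedInputStream.
    try (FSDataInputStream in = fc.open(new Path("/tmp/data"))) {
      byte[] buf = new byte[4096];
      long total = 0;
      int n;
      while ((n = in.read(buf)) > 0) {
        total += n; // consume the bytes; here we just count them
      }
      System.out.println("read " + total + " bytes");
    }
  }
}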
Use of org.apache.hadoop.security.AccessControlException in project incubator-crail by apache.
From the class CrailHDFS, method listStatus:
@Override
public FileStatus[] listStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
  try {
    CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
    Iterator<String> iter = node.asContainer().listEntries();
    ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
    while (iter.hasNext()) {
      String filepath = iter.next();
      CrailNode directFile = dfs.lookup(filepath).get();
      if (directFile != null) {
        FsPermission permission = FsPermission.getFileDefault();
        if (directFile.getType().isDirectory()) {
          permission = FsPermission.getDirDefault();
        }
        FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(),
            CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE,
            directFile.getModificationTime(), directFile.getModificationTime(), permission,
            CrailConstants.USER, CrailConstants.USER,
            new Path(filepath).makeQualified(this.getUri(), workingDir));
        statusList.add(status);
      }
    }
    FileStatus[] list = new FileStatus[statusList.size()];
    statusList.toArray(list);
    return list;
  } catch (Exception e) {
    throw new FileNotFoundException(path.toUri().getRawPath());
  }
}
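And a matching listing sketch through FileContext.Util, which delegates to the listStatus implementation above; again the URI and path are placeholders.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

public class ListDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(URI.create("crail://namenode:9060/"), new Configuration());
    // FileContext.util().listStatus delegates to the AbstractFileSystem's listStatus.
    for (FileStatus status : fc.util().listStatus(new Path("/tmp"))) {
      System.out.println(status.getPath() + (status.isDirectory() ? "/" : ""));
    }
  }
}

Note that the broad catch (Exception e) in listStatus reports every failure, including permission problems, as a FileNotFoundException; callers cannot distinguish a missing directory from a denied one.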