use of org.apache.hadoop.fs.PathNotFoundException in project hadoop by apache.
the class RegistryUtils method statChildren.
/**
 * List children of a directory and retrieve their
 * {@link RegistryPathStatus} values.
 * <p>
 * This is not an atomic operation; a child may be deleted
 * during the iteration through the child entries. If this happens,
 * the <code>PathNotFoundException</code> is caught and that child
 * entry omitted.
 *
 * @param path path
 * @return a possibly empty map of child entries listed by
 * their short name.
 * @throws PathNotFoundException path is not in the registry.
 * @throws InvalidPathnameException the path is invalid.
 * @throws IOException any other IO exception
 */
public static Map<String, RegistryPathStatus> statChildren(
    RegistryOperations registryOperations,
    String path)
    throws PathNotFoundException, InvalidPathnameException, IOException {
  List<String> childNames = registryOperations.list(path);
  Map<String, RegistryPathStatus> results =
      new HashMap<String, RegistryPathStatus>();
  for (String childName : childNames) {
    String child = join(path, childName);
    try {
      RegistryPathStatus stat = registryOperations.stat(child);
      results.put(childName, stat);
    } catch (PathNotFoundException pnfe) {
      // the child may have been deleted between list() and stat();
      // the trailing pnfe argument logs the stack trace at debug level
      if (LOG.isDebugEnabled()) {
        LOG.debug("stat failed on {}: moved? {}", child, pnfe, pnfe);
      }
      // and continue
    }
  }
  return results;
}
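A minimal usage sketch, assuming a started RegistryOperations instance named operations and an illustrative registry path (neither comes from the snippet above):

// Sketch: list and stat the children of a registry path; entries deleted
// between list() and stat() are silently dropped by statChildren().
Map<String, RegistryPathStatus> children =
    RegistryUtils.statChildren(operations, "/users/example/services");
for (Map.Entry<String, RegistryPathStatus> entry : children.entrySet()) {
  System.out.println(entry.getKey() + " -> " + entry.getValue());
}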
use of org.apache.hadoop.fs.PathNotFoundException in project hadoop by apache.
the class CuratorService method operationFailure.
/**
 * Create an IOE when an operation fails.
 * @param path path of operation
 * @param operation operation attempted
 * @param exception the exception caught
 * @param acls the ACLs of the operation; included in diagnostics messages
 * @return an IOE to throw that contains the path and operation details.
 */
protected IOException operationFailure(String path,
    String operation,
    Exception exception,
    List<ACL> acls) {
  IOException ioe;
  String aclList = "[" + RegistrySecurity.aclsToString(acls) + "]";
  if (exception instanceof KeeperException.NoNodeException) {
    ioe = new PathNotFoundException(path);
  } else if (exception instanceof KeeperException.NodeExistsException) {
    ioe = new FileAlreadyExistsException(path);
  } else if (exception instanceof KeeperException.NoAuthException) {
    ioe = new NoPathPermissionsException(path,
        "Not authorized to access path; ACLs: " + aclList);
  } else if (exception instanceof KeeperException.NotEmptyException) {
    ioe = new PathIsNotEmptyDirectoryException(path);
  } else if (exception instanceof KeeperException.AuthFailedException) {
    ioe = new AuthenticationFailedException(path,
        "Authentication Failed: " + exception
        + "; " + securityConnectionDiagnostics,
        exception);
  } else if (exception instanceof
      KeeperException.NoChildrenForEphemeralsException) {
    ioe = new NoChildrenForEphemeralsException(path,
        "Cannot create a path under an ephemeral node: " + exception,
        exception);
  } else if (exception instanceof KeeperException.InvalidACLException) {
    // this is a security exception of a kind
    // include the ACLs to help the diagnostics
    StringBuilder builder = new StringBuilder();
    builder.append("Path access failure ").append(aclList);
    builder.append(" ");
    builder.append(securityConnectionDiagnostics);
    ioe = new NoPathPermissionsException(path, builder.toString());
  } else {
    ioe = new RegistryIOException(path,
        "Failure of " + operation + " on " + path + ": "
        + exception.toString(),
        exception);
  }
  if (ioe.getCause() == null) {
    ioe.initCause(exception);
  }
  return ioe;
}
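The zkStat() snippet below calls a three-argument operationFailure(); a minimal sketch of such a convenience overload, assuming it simply delegates with no ACL list (the actual overload in CuratorService may differ):

// Sketch of a no-ACL convenience overload; the four-argument variant above
// is assumed to tolerate a null ACL list in its diagnostics string.
protected IOException operationFailure(String path,
    String operation,
    Exception exception) {
  return operationFailure(path, operation, exception, null);
}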
use of org.apache.hadoop.fs.PathNotFoundException in project hadoop by apache.
the class CuratorService method zkStat.
/**
 * Stat the file.
 * @param path path of operation
 * @return a curator stat entry
 * @throws IOException on a failure
 * @throws PathNotFoundException if the path was not found
 */
public Stat zkStat(String path) throws IOException {
  checkServiceLive();
  String fullpath = createFullPath(path);
  Stat stat;
  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Stat {}", fullpath);
    }
    stat = curator.checkExists().forPath(fullpath);
  } catch (Exception e) {
    throw operationFailure(fullpath, "read()", e);
  }
  if (stat == null) {
    throw new PathNotFoundException(path);
  }
  return stat;
}
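A hedged sketch of using zkStat() as an existence probe; the path is illustrative, and absence surfaces as a PathNotFoundException rather than a null return:

// Sketch: probe a registry path and report its data length if present.
try {
  Stat stat = zkStat("/users/example/services/app-1");
  LOG.info("Node exists with {} bytes of data", stat.getDataLength());
} catch (PathNotFoundException e) {
  LOG.info("Node is absent");
}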
use of org.apache.hadoop.fs.PathNotFoundException in project hadoop by apache.
the class RegistryAdminService method purge.
/**
 * Recursive operation to purge all matching records under a base path.
 * <ol>
 * <li>Uses a depth first search</li>
 * <li>A match is on ID and persistence policy, or, if policy==-1, any match</li>
 * <li>If a record matches then it is deleted without any child searches</li>
 * <li>Deletions will be asynchronous if a callback is provided</li>
 * </ol>
 *
 * The code is designed to be robust against parallel deletions taking place;
 * in such a case it will stop attempting that part of the tree. This
 * avoids the situation of more than one purge happening in parallel and
 * one of the purge operations deleting the node tree above the other.
 * @param path base path
 * @param selector selector for the purge policy
 * @param purgePolicy what to do if there is a matching record with children
 * @param callback optional curator callback
 * @return the number of delete operations performed. As deletes may be for
 * everything under a path, this may be less than the number of records
 * actually deleted
 * @throws IOException problems
 * @throws PathIsNotEmptyDirectoryException if an entry cannot be deleted
 * as it has children and the purge policy is FailOnChildren
 */
@VisibleForTesting
public int purge(String path,
    NodeSelector selector,
    PurgePolicy purgePolicy,
    BackgroundCallback callback) throws IOException {
  boolean toDelete = false;
  // look at self to see if it has a service record
  Map<String, RegistryPathStatus> childEntries;
  Collection<RegistryPathStatus> entries;
  try {
    // list this path's children
    childEntries = RegistryUtils.statChildren(this, path);
    entries = childEntries.values();
  } catch (PathNotFoundException e) {
    // exit
    return 0;
  }
  try {
    RegistryPathStatus registryPathStatus = stat(path);
    ServiceRecord serviceRecord = resolve(path);
    // there is now an entry here.
    toDelete = selector.shouldSelect(path, registryPathStatus, serviceRecord);
  } catch (EOFException ignored) {
    // ignore
  } catch (InvalidRecordException ignored) {
    // ignore
  } catch (NoRecordException ignored) {
    // ignore
  } catch (PathNotFoundException e) {
    // exit
    return 0;
  }
  if (toDelete && !entries.isEmpty()) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Match on record @ {} with children ", path);
    }
    // there's children
    switch (purgePolicy) {
      case SkipOnChildren:
        // don't do the deletion... continue to next record
        if (LOG.isDebugEnabled()) {
          LOG.debug("Skipping deletion");
        }
        toDelete = false;
        break;
      case PurgeAll:
        // mark for deletion
        if (LOG.isDebugEnabled()) {
          LOG.debug("Scheduling for deletion with children");
        }
        toDelete = true;
        entries = new ArrayList<RegistryPathStatus>(0);
        break;
      case FailOnChildren:
        if (LOG.isDebugEnabled()) {
          LOG.debug("Failing deletion operation");
        }
        throw new PathIsNotEmptyDirectoryException(path);
    }
  }
  int deleteOps = 0;
  if (toDelete) {
    try {
      zkDelete(path, true, callback);
    } catch (PathNotFoundException e) {
      // this is a no-op, and all children can be skipped
      return deleteOps;
    }
    deleteOps++;
  }
  // now go through the children
  for (RegistryPathStatus status : entries) {
    String childname = status.path;
    String childpath = RegistryPathUtils.join(path, childname);
    deleteOps += purge(childpath, selector, purgePolicy, callback);
  }
  return deleteOps;
}
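A hedged sketch of driving purge(), assuming an instance named registryAdminService plus the SelectByYarnPersistence selector and PurgePolicy enum defined elsewhere in RegistryAdminService; the base path, container ID, and persistence constant are illustrative:

// Sketch: synchronously purge everything under /users that matches an
// (illustrative) container ID recorded with container-level persistence.
int deleteOps = registryAdminService.purge("/users",
    new RegistryAdminService.SelectByYarnPersistence(
        "container_1400000000000_0001_01_000002",
        PersistencePolicies.CONTAINER),
    RegistryAdminService.PurgePolicy.PurgeAll,
    null);  // no BackgroundCallback, so deletions are synchronous
LOG.info("Completed {} delete operations", deleteOps);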
use of org.apache.hadoop.fs.PathNotFoundException in project hadoop by apache.
the class TestRegistryOperations method testMkdirNoParent.
@Test
public void testMkdirNoParent() throws Throwable {
  String path = ENTRY_PATH + "/missing";
  try {
    operations.mknode(path, false);
    RegistryPathStatus stat = operations.stat(path);
    fail("Got a status " + stat);
  } catch (PathNotFoundException expected) {
    // expected
  }
}