use of org.apache.commons.lang.text.StrBuilder in project hadoop by apache.
the class PlanCommand method execute.
/**
* Runs the plan command. This command can be run with various options like
* <p>
* -plan -node IP -plan -node hostName -plan -node DatanodeUUID
*
* @param cmd - CommandLine
* @throws Exception
*/
@Override
public void execute(CommandLine cmd) throws Exception {
  StrBuilder result = new StrBuilder();
  String outputLine = "";
  LOG.debug("Processing Plan Command.");
  Preconditions.checkState(cmd.hasOption(DiskBalancerCLI.PLAN));
  verifyCommandOptions(DiskBalancerCLI.PLAN, cmd);
  if (cmd.getOptionValue(DiskBalancerCLI.PLAN) == null) {
    throw new IllegalArgumentException("A node name is required to create a" + " plan.");
  }
  if (cmd.hasOption(DiskBalancerCLI.BANDWIDTH)) {
    this.bandwidth = Integer.parseInt(cmd.getOptionValue(DiskBalancerCLI.BANDWIDTH));
  }
  if (cmd.hasOption(DiskBalancerCLI.MAXERROR)) {
    this.maxError = Integer.parseInt(cmd.getOptionValue(DiskBalancerCLI.MAXERROR));
  }
  readClusterInfo(cmd);
  String output = null;
  if (cmd.hasOption(DiskBalancerCLI.OUTFILE)) {
    output = cmd.getOptionValue(DiskBalancerCLI.OUTFILE);
  }
  setOutputPath(output);
  // -plan nodename is the command line argument.
  DiskBalancerDataNode node = getNode(cmd.getOptionValue(DiskBalancerCLI.PLAN));
  if (node == null) {
    throw new IllegalArgumentException("Unable to find the specified node. " + cmd.getOptionValue(DiskBalancerCLI.PLAN));
  }
  this.thresholdPercentage = getThresholdPercentage(cmd);
  LOG.debug("threshold Percentage is {}", this.thresholdPercentage);
  setNodesToProcess(node);
  populatePathNames(node);
  NodePlan plan = null;
  List<NodePlan> plans = getCluster().computePlan(this.thresholdPercentage);
  setPlanParams(plans);
  if (plans.size() > 0) {
    plan = plans.get(0);
  }
  try (FSDataOutputStream beforeStream = create(String.format(DiskBalancerCLI.BEFORE_TEMPLATE, cmd.getOptionValue(DiskBalancerCLI.PLAN)))) {
    beforeStream.write(getCluster().toJson().getBytes(StandardCharsets.UTF_8));
  }
  try {
    if (plan != null && plan.getVolumeSetPlans().size() > 0) {
      outputLine = String.format("Writing plan to:");
      recordOutput(result, outputLine);
      final String planFileName = String.format(DiskBalancerCLI.PLAN_TEMPLATE, cmd.getOptionValue(DiskBalancerCLI.PLAN));
      final String planFileFullName = new Path(getOutputPath(), planFileName).toString();
      recordOutput(result, planFileFullName);
      try (FSDataOutputStream planStream = create(planFileName)) {
        planStream.write(plan.toJson().getBytes(StandardCharsets.UTF_8));
      }
    } else {
      outputLine = String.format("No plan generated. DiskBalancing not needed for node: %s" + " threshold used: %s", cmd.getOptionValue(DiskBalancerCLI.PLAN), this.thresholdPercentage);
      recordOutput(result, outputLine);
    }
    if (cmd.hasOption(DiskBalancerCLI.VERBOSE) && plans.size() > 0) {
      printToScreen(plans);
    }
  } catch (Exception e) {
    final String errMsg = "Errors while recording the output of plan command.";
    LOG.error(errMsg, e);
    result.appendln(errMsg);
    result.appendln(Throwables.getStackTraceAsString(e));
  }
  getPrintStream().print(result.toString());
}
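The method accumulates all user-facing lines in a single StrBuilder and prints them once at the end; recordOutput, defined on the Command base class, presumably appends each line to the builder (and logs it). A minimal, self-contained sketch of that accumulation idea, with an illustrative recordLine helper that is not part of Hadoop:

import org.apache.commons.lang.text.StrBuilder;

public class PlanOutputSketch {
  // Illustrative stand-in for Command#recordOutput: append one line to the shared builder.
  private static void recordLine(StrBuilder result, String line) {
    result.appendln(line);
  }

  public static void main(String[] args) {
    StrBuilder result = new StrBuilder();
    recordLine(result, "Writing plan to:");
    recordLine(result, "/system/diskbalancer/2017-Jan-01/node1.plan.json");  // hypothetical path
    // One print call at the end, mirroring getPrintStream().print(result.toString()).
    System.out.print(result.toString());
  }
}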
use of org.apache.commons.lang.text.StrBuilder in project hadoop by apache.
the class ReportCommand method execute.
@Override
public void execute(CommandLine cmd) throws Exception {
  StrBuilder result = new StrBuilder();
  String outputLine = "Processing report command";
  recordOutput(result, outputLine);
  Preconditions.checkState(cmd.hasOption(DiskBalancerCLI.REPORT));
  verifyCommandOptions(DiskBalancerCLI.REPORT, cmd);
  readClusterInfo(cmd);
  final String nodeFormat = "%d/%d %s[%s:%d] - <%s>: %d volumes with node data density %.2f.";
  final String nodeFormatWithoutSequence = "%s[%s:%d] - <%s>: %d volumes with node data density %.2f.";
  final String volumeFormat = "[%s: volume-%s] - %.2f used: %d/%d, %.2f free: %d/%d, " + "isFailed: %s, isReadOnly: %s, isSkip: %s, isTransient: %s.";
  if (cmd.hasOption(DiskBalancerCLI.NODE)) {
    /*
     * Reporting volume information for specific DataNode(s)
     */
    handleNodeReport(cmd, result, nodeFormatWithoutSequence, volumeFormat);
  } else {
    // handle TOP
    /*
     * Reporting volume information for top X DataNode(s)
     */
    handleTopReport(cmd, result, nodeFormat);
  }
  getPrintStream().println(result.toString());
}
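Each report line is rendered with String.format against one of the three templates above. A quick sketch of how nodeFormat expands for one DataNode; all values below are made up for illustration, and the mapping of fields to hostname/IP/port is an assumption read off the template:

public class ReportLineSketch {
  public static void main(String[] args) {
    String nodeFormat = "%d/%d %s[%s:%d] - <%s>: %d volumes with node data density %.2f.";
    String line = String.format(nodeFormat,
        1, 5,                              // position in the top-N listing / nodes reported
        "datanode-1", "10.0.0.7", 9867,    // hostname, IP address, port (assumed field order)
        "c3f1-example-uuid", 4, 0.23);     // DataNode UUID, volume count, node data density
    System.out.println(line);
    // -> 1/5 datanode-1[10.0.0.7:9867] - <c3f1-example-uuid>: 4 volumes with node data density 0.23.
  }
}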
use of org.apache.commons.lang.text.StrBuilder in project gradle by gradle.
the class ClassFileExtractionManager method extractClassFile.
private boolean extractClassFile(final String className) {
    boolean classFileExtracted = false;
    final File extractedClassFile = tempFile();
    final String classFileName = new StrBuilder().append(className).append(".class").toString();
    final String classNamePackage = classNamePackage(className);
    final Set<File> packageJarFiles = packageJarFilesMappings.get(classNamePackage);
    File classFileSourceJar = null;
    if (packageJarFiles != null && !packageJarFiles.isEmpty()) {
        final Iterator<File> packageJarFilesIt = packageJarFiles.iterator();
        while (!classFileExtracted && packageJarFilesIt.hasNext()) {
            final File jarFile = packageJarFilesIt.next();
            try {
                classFileExtracted = JarUtil.extractZipEntry(jarFile, classFileName, extractedClassFile);
                if (classFileExtracted) {
                    classFileSourceJar = jarFile;
                }
            } catch (IOException e) {
                throw new GradleException("failed to extract class file from jar (" + jarFile + ")", e);
            }
        }
        if (classFileExtracted) {
            LOGGER.debug("extracted class {} from {}", className, classFileSourceJar.getName());
            extractedJarClasses.put(className, extractedClassFile);
        }
    }
    return classFileExtracted;
}
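JarUtil.extractZipEntry is a Gradle-internal helper that copies a single entry out of a jar into a target file. A rough standalone equivalent using java.util.zip might look like the sketch below; this is an assumption about its behaviour, not Gradle's actual implementation:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class JarEntryExtractionSketch {
    // Copies a single entry out of jarFile into destFile; returns false if the entry is absent.
    static boolean extractZipEntry(File jarFile, String entryName, File destFile) throws IOException {
        try (ZipFile zip = new ZipFile(jarFile)) {
            ZipEntry entry = zip.getEntry(entryName);
            if (entry == null) {
                return false;
            }
            try (InputStream in = zip.getInputStream(entry);
                 FileOutputStream out = new FileOutputStream(destFile)) {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read);
                }
            }
            return true;
        }
    }
}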
use of org.apache.commons.lang.text.StrBuilder in project midpoint by Evolveum.
the class ValuePolicyProcessor method generateAttempt.
private String generateAttempt(StringPolicyType policy, int defaultLength, boolean generateMinimalSize, OperationResult result) {
    // if (policy.getLimitations() != null &&
    // policy.getLimitations().getMinLength() != null){
    // generateMinimalSize = true;
    // }
    // setup default values where missing
    // PasswordPolicyUtils.normalize(pp);
    // Optimize usage of limits as a hashmap of limitations, each mapped to the
    // set of valid chars for that limitation
    Map<StringLimitType, List<String>> lims = new HashMap<StringLimitType, List<String>>();
    int minLen = defaultLength;
    int maxLen = defaultLength;
    int unique = defaultLength / 2;
    if (policy != null) {
        for (StringLimitType l : policy.getLimitations().getLimit()) {
            if (null != l.getCharacterClass().getValue()) {
                lims.put(l, StringPolicyUtils.stringTokenizer(l.getCharacterClass().getValue()));
            } else {
                lims.put(l, StringPolicyUtils.stringTokenizer(StringPolicyUtils.collectCharacterClass(policy.getCharacterClass(), l.getCharacterClass().getRef())));
            }
        }
        // Get global limitations
        minLen = policy.getLimitations().getMinLength() == null ? 0 : policy.getLimitations().getMinLength().intValue();
        if (minLen != 0 && minLen > defaultLength) {
            defaultLength = minLen;
        }
        maxLen = (policy.getLimitations().getMaxLength() == null ? 0 : policy.getLimitations().getMaxLength().intValue());
        unique = policy.getLimitations().getMinUniqueChars() == null ? minLen : policy.getLimitations().getMinUniqueChars().intValue();
    }
    // test correctness of definition
    if (unique > minLen) {
        minLen = unique;
        OperationResult reportBug = new OperationResult("Global limitation check");
        reportBug.recordWarning("More unique characters are required than the defined minimum length. Raising the minimum to the number of required unique chars.");
    }
    if (minLen == 0 && maxLen == 0) {
        minLen = defaultLength;
        maxLen = defaultLength;
        generateMinimalSize = true;
    }
    if (maxLen == 0) {
        if (minLen > defaultLength) {
            maxLen = minLen;
        } else {
            maxLen = defaultLength;
        }
    }
    // Initialize generator
    StringBuilder password = new StringBuilder();
    /*
     * **********************************
     * Try to find best characters to be first in password
     */
    Map<StringLimitType, List<String>> mustBeFirst = new HashMap<StringLimitType, List<String>>();
    for (StringLimitType l : lims.keySet()) {
        if (l.isMustBeFirst() != null && l.isMustBeFirst()) {
            mustBeFirst.put(l, lims.get(l));
        }
    }
    // If any limitation was found to be first
    if (!mustBeFirst.isEmpty()) {
        Map<Integer, List<String>> posibleFirstChars = cardinalityCounter(mustBeFirst, null, false, false, result);
        int intersectionCardinality = mustBeFirst.keySet().size();
        List<String> intersectionCharacters = posibleFirstChars.get(intersectionCardinality);
        // If no intersection was found then raise error
        if (null == intersectionCharacters || intersectionCharacters.size() == 0) {
            result.recordFatalError("No intersection for required first character sets in value policy:" + policy.getDescription());
            // Log error
            if (LOGGER.isErrorEnabled()) {
                LOGGER.error("Unable to generate value for " + getPath() + ": No intersection for required first character sets in value policy: [" + policy.getDescription() + "] following character limitation and sets are used:");
                for (StringLimitType l : mustBeFirst.keySet()) {
                    StrBuilder tmp = new StrBuilder();
                    tmp.appendSeparator(", ");
                    tmp.appendAll(mustBeFirst.get(l));
                    LOGGER.error("L:" + l.getDescription() + " -> [" + tmp + "]");
                }
            }
            // EXIT
            return null;
        } else {
            if (LOGGER.isDebugEnabled()) {
                StrBuilder tmp = new StrBuilder();
                tmp.appendSeparator(", ");
                tmp.appendAll(intersectionCharacters);
                LOGGER.trace("Generate first character intersection items [" + tmp + "] into " + getPath() + ".");
            }
            // Generate random char into password from intersection
            password.append(intersectionCharacters.get(RAND.nextInt(intersectionCharacters.size())));
        }
    }
    /*
     * **************************************
     * Generate rest to fulfill minimal criteria
     */
    boolean uniquenessReached = false;
    // Count cardinality of elements
    Map<Integer, List<String>> chars;
    for (int i = 0; i < minLen; i++) {
        // Check if still unique chars are needed
        if (password.length() >= unique) {
            uniquenessReached = true;
        }
        // Find all usable characters
        chars = cardinalityCounter(lims, StringPolicyUtils.stringTokenizer(password.toString()), false, uniquenessReached, result);
        // If something goes badly then go out
        if (null == chars) {
            return null;
        }
        if (chars.isEmpty()) {
            LOGGER.trace("Minimal criteria were met. No more characters");
            break;
        }
        // Find lowest possible cardinality and then generate char
        for (int card = 1; card < lims.keySet().size(); card++) {
            if (chars.containsKey(card)) {
                List<String> validChars = chars.get(card);
                password.append(validChars.get(RAND.nextInt(validChars.size())));
                break;
            }
        }
    }
    // test if maximum is not exceeded
    if (password.length() > maxLen) {
        result.recordFatalError("Unable to meet minimal criteria and not exceed maximal size of " + getPath() + ".");
        return null;
    }
    for (int i = 0; i < minLen; i++) {
        // test if max is reached
        if (password.length() == maxLen) {
            // no more characters, maximal size is reached
            break;
        }
        if (password.length() >= minLen && generateMinimalSize) {
            // no more characters are needed
            break;
        }
        // Check if still unique chars are needed
        if (password.length() >= unique) {
            uniquenessReached = true;
        }
        // find all usable characters
        chars = cardinalityCounter(lims, StringPolicyUtils.stringTokenizer(password.toString()), true, uniquenessReached, result);
        // If something goes badly then go out
        if (null == chars) {
            // we hope this never happens.
            result.recordFatalError("No valid characters to generate, but not all limitations are reached");
            return null;
        }
        // our work
        if (chars.isEmpty()) {
            if (i == 0) {
                password.append(RandomStringUtils.randomAlphanumeric(minLen));
            }
            break;
            // if (!StringUtils.isBlank(password.toString()) &&
            // password.length() >= minLen) {
            // break;
            // }
            // check if this is a first cycle and if we need to use some
            // default (alphanum) character class.
        }
        // Find lowest possible cardinality and then generate char
        for (int card = 1; card <= lims.keySet().size(); card++) {
            if (chars.containsKey(card)) {
                List<String> validChars = chars.get(card);
                password.append(validChars.get(RAND.nextInt(validChars.size())));
                break;
            }
        }
    }
    if (password.length() < minLen) {
        result.recordFatalError("Unable to generate value for " + getPath() + " and meet minimal size of " + getPath() + ". Actual length: " + password.length() + ", required: " + minLen);
        LOGGER.trace("Unable to generate value for " + getPath() + " and meet minimal size of " + getPath() + ". Actual length: {}, required: {}", password.length(), minLen);
        return null;
    }
    result.recordSuccess();
    // Shuffle the output to break up patterns in the generated value
    StrBuilder sb = new StrBuilder(password.substring(0, 1));
    List<String> shuffleBuffer = StringPolicyUtils.stringTokenizer(password.substring(1));
    Collections.shuffle(shuffleBuffer);
    sb.appendAll(shuffleBuffer);
    return sb.toString();
}
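The logging branches above build their output with appendSeparator followed by appendAll. Note that appendSeparator only emits the separator when the builder is already non-empty, so on a fresh builder the list items are simply concatenated. A small standalone sketch of that behaviour, assuming the commons-lang StrBuilder API also provides appendWithSeparators for a comma-joined rendering (not midPoint code):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.text.StrBuilder;

public class StrBuilderJoinSketch {
    public static void main(String[] args) {
        List<String> chars = Arrays.asList("a", "b", "c");

        // On an empty builder appendSeparator is a no-op, and appendAll concatenates
        // the items without separators -- the behaviour of the logging code above.
        StrBuilder tmp = new StrBuilder();
        tmp.appendSeparator(", ");
        tmp.appendAll(chars);
        System.out.println(tmp.toString());  // prints: abc

        // appendWithSeparators joins the items with the given separator between them.
        System.out.println(new StrBuilder().appendWithSeparators(chars, ", ").toString());  // prints: a, b, c
    }
}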
use of org.apache.commons.lang.text.StrBuilder in project hadoop by apache.
the class TestDFSAdmin method scanIntoString.
private static String scanIntoString(final ByteArrayOutputStream baos) {
  final StrBuilder sb = new StrBuilder();
  final Scanner scanner = new Scanner(baos.toString());
  while (scanner.hasNextLine()) {
    sb.appendln(scanner.nextLine());
  }
  scanner.close();
  return sb.toString();
}