Use of org.apache.hadoop.tools.DistCpOptions in project hadoop by apache: class TestOptionsParser, method testAppendOption.
@Test
public void testAppendOption() {
  Configuration conf = new Configuration();
  Assert.assertFalse(conf.getBoolean(
      DistCpOptionSwitch.APPEND.getConfigLabel(), false));
  Assert.assertFalse(conf.getBoolean(
      DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false));

  DistCpOptions options = OptionsParser.parse(new String[] {
      "-update", "-append",
      "hdfs://localhost:9820/source/first",
      "hdfs://localhost:9820/target/" });
  options.appendToConf(conf);
  Assert.assertTrue(conf.getBoolean(
      DistCpOptionSwitch.APPEND.getConfigLabel(), false));
  Assert.assertTrue(conf.getBoolean(
      DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false));

  // make sure -append is only valid when -update is specified
  try {
    OptionsParser.parse(new String[] {
        "-append",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/" });
    fail("Append should fail if update option is not specified");
  } catch (IllegalArgumentException e) {
    GenericTestUtils.assertExceptionContains(
        "Append is valid only with update options", e);
  }

  // make sure -append is invalid when skipCrc is specified
  try {
    OptionsParser.parse(new String[] {
        "-append", "-update", "-skipcrccheck",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/" });
    fail("Append should fail if skipCrc option is specified");
  } catch (IllegalArgumentException e) {
    GenericTestUtils.assertExceptionContains(
        "Append is disallowed when skipping CRC", e);
  }
}
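For context, the same parse-then-appendToConf flow works outside a test harness. The sketch below is illustrative only: it reuses the calls shown above (OptionsParser.parse, DistCpOptions.appendToConf, DistCpOptionSwitch.getConfigLabel), while the class name and paths are made up for the example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tools.DistCpOptionSwitch;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.OptionsParser;

public class AppendOptionSketch {
  public static void main(String[] args) {
    // -append is accepted only together with -update; parsing validates this.
    DistCpOptions options = OptionsParser.parse(new String[] {
        "-update", "-append",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/" });

    // Transfer the parsed switches into a job Configuration.
    Configuration conf = new Configuration();
    options.appendToConf(conf);
    System.out.println("append = "
        + conf.getBoolean(DistCpOptionSwitch.APPEND.getConfigLabel(), false));
    System.out.println("sync   = "
        + conf.getBoolean(DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false));
  }
}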
Use of org.apache.hadoop.tools.DistCpOptions in project hadoop by apache: class TestOptionsParser, method testToString.
@Test
public void testToString() {
  DistCpOptions option = new DistCpOptions(new Path("abc"), new Path("xyz"));
  String val = "DistCpOptions{atomicCommit=false, syncFolder=false, "
      + "deleteMissing=false, ignoreFailures=false, overwrite=false, "
      + "append=false, useDiff=false, useRdiff=false, "
      + "fromSnapshot=null, toSnapshot=null, "
      + "skipCRC=false, blocking=true, numListstatusThreads=0, maxMaps=20, "
      + "mapBandwidth=0.0, copyStrategy='uniformsize', preserveStatus=[], "
      + "preserveRawXattrs=false, atomicWorkPath=null, logPath=null, "
      + "sourceFileListing=abc, sourcePaths=null, targetPath=xyz, "
      + "targetPathExists=true, filtersFile='null'}";
  String optionString = option.toString();
  Assert.assertEquals(val, optionString);
  Assert.assertNotSame(DistCpOptionSwitch.ATOMIC_COMMIT.toString(), DistCpOptionSwitch.ATOMIC_COMMIT.name());
}
Use of org.apache.hadoop.tools.DistCpOptions in project hadoop by apache: class TestOptionsParser, method testParseWorkPath.
@Test
public void testParseWorkPath() {
  DistCpOptions options = OptionsParser.parse(new String[] {
      "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
  Assert.assertNull(options.getAtomicWorkPath());

  options = OptionsParser.parse(new String[] { "-atomic",
      "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
  Assert.assertNull(options.getAtomicWorkPath());

  options = OptionsParser.parse(new String[] { "-atomic", "-tmp", "hdfs://localhost:9820/work",
      "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
  Assert.assertEquals(options.getAtomicWorkPath(), new Path("hdfs://localhost:9820/work"));

  // -tmp is only meaningful together with -atomic
  try {
    OptionsParser.parse(new String[] { "-tmp", "hdfs://localhost:9820/work",
        "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
    Assert.fail("work path was allowed without -atomic switch");
  } catch (IllegalArgumentException ignore) {
  }
}
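A hedged sketch of how a caller might read the atomic-commit settings after parsing follows. It reuses OptionsParser.parse and getAtomicWorkPath from the test above, and assumes DistCpOptions also exposes a shouldAtomicCommit() accessor for the -atomic flag; the class name and paths are placeholders.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.OptionsParser;

public class AtomicWorkPathSketch {
  public static void main(String[] args) {
    // -tmp is only accepted together with -atomic (see the test above).
    DistCpOptions options = OptionsParser.parse(new String[] {
        "-atomic", "-tmp", "hdfs://localhost:9820/work",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/" });

    Path workPath = options.getAtomicWorkPath();   // hdfs://localhost:9820/work
    System.out.println("atomic commit: " + options.shouldAtomicCommit());  // assumed accessor
    System.out.println("work path:     " + workPath);
  }
}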
Use of org.apache.hadoop.tools.DistCpOptions in project hadoop by apache: class TestOptionsParser, method testParsebandwidth.
@Test
public void testParsebandwidth() {
  // default map bandwidth when -bandwidth is not given
  DistCpOptions options = OptionsParser.parse(new String[] {
      "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
  Assert.assertEquals(options.getMapBandwidth(), 0, DELTA);

  options = OptionsParser.parse(new String[] { "-bandwidth", "11.2",
      "hdfs://localhost:9820/source/first", "hdfs://localhost:9820/target/" });
  Assert.assertEquals(options.getMapBandwidth(), 11.2, DELTA);
}
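As a rough illustration of where the parsed bandwidth ends up, the sketch below assumes DistCpOptionSwitch also defines a BANDWIDTH switch whose config label appendToConf writes to when a bandwidth is set; the value and paths are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tools.DistCpOptionSwitch;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.OptionsParser;

public class BandwidthOptionSketch {
  public static void main(String[] args) {
    DistCpOptions options = OptionsParser.parse(new String[] {
        "-bandwidth", "11.2",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/" });
    System.out.println("per-map bandwidth (MB/s): " + options.getMapBandwidth());

    // appendToConf also publishes the value under the switch's config label
    // (assumed enum constant BANDWIDTH).
    Configuration conf = new Configuration();
    options.appendToConf(conf);
    System.out.println(conf.get(DistCpOptionSwitch.BANDWIDTH.getConfigLabel()));
  }
}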
Use of org.apache.hadoop.tools.DistCpOptions in project hive by apache: class Hadoop23Shims, method runDistCp.
@Override
public boolean runDistCp(List<Path> srcPaths, Path dst, Configuration conf) throws IOException {
  DistCpOptions options = new DistCpOptions.Builder(srcPaths, dst)
      .withSyncFolder(true)
      .withCRC(true)
      .preserve(FileAttribute.BLOCKSIZE)
      .build();

  // Creates the command-line parameters for distcp
  List<String> params = constructDistCpParams(srcPaths, dst, conf);

  try {
    conf.setBoolean("mapred.mapper.new-api", true);
    DistCp distcp = new DistCp(conf, options);
    // added by HADOOP-10459
    return distcp.run(params.toArray(new String[0])) == 0;
  } catch (Exception e) {
    throw new IOException("Cannot execute DistCp process: " + e, e);
  } finally {
    conf.setBoolean("mapred.mapper.new-api", false);
  }
}
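Besides the CLI-style distcp.run(args) used in the shim above, the builder-produced DistCpOptions can drive DistCp programmatically. Below is a minimal sketch under the assumption that DistCp also exposes an execute() method returning the launched Job; the paths and class name are placeholders, and the builder chain simply mirrors the one in the Hive shim.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.DistCpOptions.FileAttribute;

public class DistCpBuilderSketch {
  public static void main(String[] args) throws Exception {
    List<Path> srcPaths = Arrays.asList(new Path("hdfs://localhost:9820/source/first"));
    Path dst = new Path("hdfs://localhost:9820/target/");

    // Same option chain as the Hive shim above.
    DistCpOptions options = new DistCpOptions.Builder(srcPaths, dst)
        .withSyncFolder(true)
        .withCRC(true)
        .preserve(FileAttribute.BLOCKSIZE)
        .build();

    Configuration conf = new Configuration();
    Job job = new DistCp(conf, options).execute();   // assumed API; launches the copy job
    System.out.println("DistCp job: " + job.getJobID());
  }
}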