
Example 96 with IgfsPath

Use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.

Class HadoopMapReduceTest, method testWholeMapReduceExecution.

/**
 * Tests whole job execution with all phases, in all combinations of the new and old versions of the API.
 * @throws Exception If the test fails.
 */
public void testWholeMapReduceExecution() throws Exception {
    IgfsPath inDir = new IgfsPath(PATH_INPUT);
    igfs.mkdirs(inDir);
    IgfsPath inFile = new IgfsPath(inDir, HadoopWordCount2.class.getSimpleName() + "-input");
    generateTestFile(inFile.toString(), "red", red, "blue", blue, "green", green, "yellow", yellow);
    for (boolean[] apiMode : getApiModes()) {
        assert apiMode.length == 3;
        boolean useNewMapper = apiMode[0];
        boolean useNewCombiner = apiMode[1];
        boolean useNewReducer = apiMode[2];
        doTest(inFile, useNewMapper, useNewCombiner, useNewReducer);
    }
}
Also used : IgfsPath(org.apache.ignite.igfs.IgfsPath)
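For context, a standalone sketch of the same IgfsPath setup pattern is shown below. It is not part of the test above: the configuration file path and the file system name "igfs" are assumptions, and it only demonstrates creating a directory and writing an input file through the public IgniteFileSystem API.

import java.io.PrintWriter;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteFileSystem;
import org.apache.ignite.Ignition;
import org.apache.ignite.igfs.IgfsPath;

public class IgfsInputSetupSketch {
    public static void main(String[] args) throws Exception {
        // Start a node from an IGFS-enabled configuration; this path is hypothetical.
        try (Ignite ignite = Ignition.start("config/igfs-config.xml")) {
            // The file system name "igfs" is an assumption; it must match the configuration.
            IgniteFileSystem igfs = ignite.fileSystem("igfs");

            // Same pattern as the test: create the input directory, then a file inside it.
            IgfsPath inDir = new IgfsPath("/input");
            igfs.mkdirs(inDir);

            IgfsPath inFile = new IgfsPath(inDir, "wordcount-input");

            // igfs.create(path, overwrite) returns an IgfsOutputStream (a plain OutputStream).
            try (PrintWriter pw = new PrintWriter(igfs.create(inFile, true))) {
                pw.println("red blue green yellow");
            }
        }
    }
}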

Example 97 with IgfsPath

Use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.

Class HadoopCommandLineTest, method testHadoopCommandLine.

/**
 * Tests Hadoop command line integration.
 */
public void testHadoopCommandLine() throws Exception {
    assertEquals(0, executeHadoopCmd("fs", "-ls", "/"));
    assertEquals(0, executeHadoopCmd("fs", "-mkdir", "/input"));
    assertEquals(0, executeHadoopCmd("fs", "-put", new File(testWorkDir, "test-data").getAbsolutePath(), "/input"));
    assertTrue(igfs.exists(new IgfsPath("/input/test-data")));
    assertEquals(0, executeHadoopCmd("jar", examplesJar.getAbsolutePath(), "wordcount", "/input", "/output"));
    IgfsPath path = new IgfsPath("/user/" + System.getProperty("user.name") + "/");
    assertTrue(igfs.exists(path));
    IgfsPath jobStatPath = null;
    for (IgfsPath jobPath : igfs.listPaths(path)) {
        assertNull(jobStatPath);
        jobStatPath = jobPath;
    }
    File locStatFile = new File(testWorkDir, "performance");
    assertEquals(0, executeHadoopCmd("fs", "-get", jobStatPath.toString() + "/performance", locStatFile.toString()));
    long evtCnt = HadoopTestUtils.simpleCheckJobStatFile(new BufferedReader(new FileReader(locStatFile)));
    // This is the minimum number of events for a job with a combiner.
    assertTrue(evtCnt >= 22);
    assertTrue(igfs.exists(new IgfsPath("/output")));
    BufferedReader in = new BufferedReader(new InputStreamReader(igfs.open(new IgfsPath("/output/part-r-00000"))));
    List<String> res = new ArrayList<>();
    String line;
    while ((line = in.readLine()) != null) res.add(line);
    Collections.sort(res);
    assertEquals("[blue\t150, green\t200, red\t100, yellow\t50]", res.toString());
}
Also used : IgfsPath(org.apache.ignite.igfs.IgfsPath) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) ArrayList(java.util.ArrayList) FileReader(java.io.FileReader) File(java.io.File)
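The tail of the test reads the reducer output line by line through igfs.open(). A small, hedged helper along the same lines is sketched below; the class and method names are ours, and unlike the test it closes the reader with try-with-resources.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.ignite.IgniteFileSystem;
import org.apache.ignite.igfs.IgfsPath;

public class IgfsReadSketch {
    /** Reads an IGFS file line by line; igfs.open() returns an IgfsInputStream, a regular InputStream. */
    public static List<String> readLines(IgniteFileSystem igfs, String path) throws Exception {
        List<String> res = new ArrayList<>();

        try (BufferedReader in = new BufferedReader(new InputStreamReader(igfs.open(new IgfsPath(path))))) {
            String line;

            while ((line = in.readLine()) != null)
                res.add(line);
        }

        return res;
    }
}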

Example 98 with IgfsPath

Use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.

Class HadoopTasksAllVersionsTest, method testMapTask.

/**
 * Tests map task execution.
 *
 * @throws Exception If the test fails.
 */
@SuppressWarnings("ConstantConditions")
public void testMapTask() throws Exception {
    IgfsPath inDir = new IgfsPath(PATH_INPUT);
    igfs.mkdirs(inDir);
    IgfsPath inFile = new IgfsPath(inDir, HadoopWordCount2.class.getSimpleName() + "-input");
    URI inFileUri = URI.create(igfsScheme() + inFile.toString());
    try (PrintWriter pw = new PrintWriter(igfs.create(inFile, true))) {
        pw.println("hello0 world0");
        pw.println("world1 hello1");
    }
    HadoopFileBlock fileBlock1 = new HadoopFileBlock(HOSTS, inFileUri, 0, igfs.info(inFile).length() - 1);
    try (PrintWriter pw = new PrintWriter(igfs.append(inFile, false))) {
        pw.println("hello2 world2");
        pw.println("world3 hello3");
    }
    HadoopFileBlock fileBlock2 = new HadoopFileBlock(HOSTS, inFileUri, fileBlock1.length(), igfs.info(inFile).length() - fileBlock1.length());
    HadoopJobEx gridJob = getHadoopJob(igfsScheme() + inFile.toString(), igfsScheme() + PATH_OUTPUT);
    HadoopTaskInfo taskInfo = new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock1);
    HadoopTestTaskContext ctx = new HadoopTestTaskContext(taskInfo, gridJob);
    ctx.mockOutput().clear();
    ctx.run();
    assertEquals("hello0,1; world0,1; world1,1; hello1,1", Joiner.on("; ").join(ctx.mockOutput()));
    ctx.mockOutput().clear();
    ctx.taskInfo(new HadoopTaskInfo(HadoopTaskType.MAP, gridJob.id(), 0, 0, fileBlock2));
    ctx.run();
    assertEquals("hello2,1; world2,1; world3,1; hello3,1", Joiner.on("; ").join(ctx.mockOutput()));
}
Also used : IgfsPath(org.apache.ignite.igfs.IgfsPath) HadoopJobEx(org.apache.ignite.internal.processors.hadoop.HadoopJobEx) HadoopTaskInfo(org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo) HadoopFileBlock(org.apache.ignite.internal.processors.hadoop.HadoopFileBlock) URI(java.net.URI) PrintWriter(java.io.PrintWriter)
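The splitting logic in this test relies on the internal HadoopFileBlock and HadoopTaskInfo classes. The sketch below shows only the public-API part of the pattern: write, append, and derive the offset and length of the two resulting ranges from igfs.info(). The helper name and paths are hypothetical, and the internal task plumbing is deliberately left out.

import java.io.PrintWriter;

import org.apache.ignite.IgniteFileSystem;
import org.apache.ignite.igfs.IgfsPath;

public class IgfsSplitSketch {
    /** Writes a file in two passes and prints the offset/length of each resulting range. */
    public static void writeAndSplit(IgniteFileSystem igfs, String path) throws Exception {
        IgfsPath file = new IgfsPath(path);

        // First pass: create (overwrite) the file and write two lines.
        try (PrintWriter pw = new PrintWriter(igfs.create(file, true))) {
            pw.println("hello0 world0");
            pw.println("world1 hello1");
        }

        // The first range covers everything written so far.
        long block1Len = igfs.info(file).length();

        // Second pass: append two more lines (create = false, the file must already exist).
        try (PrintWriter pw = new PrintWriter(igfs.append(file, false))) {
            pw.println("hello2 world2");
            pw.println("world3 hello3");
        }

        // The second range starts where the first ended and covers the appended data.
        long block2Off = block1Len;
        long block2Len = igfs.info(file).length() - block1Len;

        System.out.println("block1: off=0, len=" + block1Len);
        System.out.println("block2: off=" + block2Off + ", len=" + block2Len);
    }
}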

Example 99 with IgfsPath

Use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.

Class IgniteHadoopFileSystemAbstractSelfTest, method testZeroReplicationFactor.

/** @throws Exception If failed. */
public void testZeroReplicationFactor() throws Exception {
    // This test doesn't make sense for any mode except PRIMARY.
    if (mode == PRIMARY) {
        Path igfsHome = new Path(PRIMARY_URI);
        Path file = new Path(igfsHome, "someFile");
        try (FSDataOutputStream out = fs.create(file, (short) 0)) {
            out.write(new byte[1024 * 1024]);
        }
        IgniteFileSystem igfs = grid(0).fileSystem("igfs");
        IgfsPath filePath = new IgfsPath("/someFile");
        IgfsFile fileInfo = igfs.info(filePath);
        awaitPartitionMapExchange();
        Collection<IgfsBlockLocation> locations = igfs.affinity(filePath, 0, fileInfo.length());
        assertEquals(1, locations.size());
        IgfsBlockLocation location = F.first(locations);
        assertEquals(1, location.nodeIds().size());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) IgfsPath(org.apache.ignite.igfs.IgfsPath) IgniteFileSystem(org.apache.ignite.IgniteFileSystem) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) IgfsBlockLocation(org.apache.ignite.igfs.IgfsBlockLocation) IgfsFile(org.apache.ignite.igfs.IgfsFile)
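A minimal, hedged sketch of the affinity lookup used above, reduced to the public IgniteFileSystem API; the helper name is ours and the file is assumed to exist.

import java.util.Collection;

import org.apache.ignite.IgniteFileSystem;
import org.apache.ignite.igfs.IgfsBlockLocation;
import org.apache.ignite.igfs.IgfsFile;
import org.apache.ignite.igfs.IgfsPath;

public class IgfsAffinitySketch {
    /** Prints which nodes host each block of the given IGFS file. */
    public static void printAffinity(IgniteFileSystem igfs, String path) {
        IgfsPath filePath = new IgfsPath(path);
        IgfsFile fileInfo = igfs.info(filePath);

        // affinity(path, start, len) maps the byte range onto block locations.
        Collection<IgfsBlockLocation> locations = igfs.affinity(filePath, 0, fileInfo.length());

        for (IgfsBlockLocation loc : locations)
            System.out.println("start=" + loc.start() + ", len=" + loc.length() + ", nodes=" + loc.nodeIds());
    }
}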

Example 100 with IgfsPath

Use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.

Class IgniteHadoopFileSystemLoggerSelfTest, method testLogMisc.

/**
 * Test miscellaneous operations logging.
 *
 * @throws Exception If failed.
 */
@SuppressWarnings("TooBroadScope")
public void testLogMisc() throws Exception {
    IgfsLogger log = IgfsLogger.logger(ENDPOINT, IGFS_NAME, LOG_DIR, 10);
    String newFile = "/dir3/file.test";
    String file1 = "/dir3/file1.test";
    String file2 = "/dir3/file1.test";
    log.logMakeDirectory(PATH);
    log.logRename(PATH, new IgfsPath(newFile));
    log.logListDirectory(PATH, new String[] { file1, file2 });
    log.logDelete(PATH, false);
    log.close();
    checkLog(
        new SB().a(U.jvmPid() + d() + TYPE_DIR_MAKE + d() + PATH_STR_ESCAPED + d() + d(17)).toString(),
        new SB().a(U.jvmPid() + d() + TYPE_RENAME + d() + PATH_STR_ESCAPED + d() + d(15) + newFile + d(2)).toString(),
        new SB().a(U.jvmPid() + d() + TYPE_DIR_LIST + d() + PATH_STR_ESCAPED + d() + d(17) + file1 + DELIM_FIELD_VAL + file2).toString(),
        new SB().a(U.jvmPid() + d() + TYPE_DELETE + d(1) + PATH_STR_ESCAPED + d() + d(16) + 0 + d()).toString()
    );
}
Also used : IgfsLogger(org.apache.ignite.internal.igfs.common.IgfsLogger) IgfsPath(org.apache.ignite.igfs.IgfsPath) SB(org.apache.ignite.internal.util.typedef.internal.SB)

Aggregations

IgfsPath (org.apache.ignite.igfs.IgfsPath): 161 usages
IgfsOutputStream (org.apache.ignite.igfs.IgfsOutputStream): 23 usages
IOException (java.io.IOException): 22 usages
ArrayList (java.util.ArrayList): 15 usages
IgniteCheckedException (org.apache.ignite.IgniteCheckedException): 14 usages
HashMap (java.util.HashMap): 13 usages
IgniteException (org.apache.ignite.IgniteException): 13 usages
IgniteFileSystem (org.apache.ignite.IgniteFileSystem): 13 usages
IgfsFile (org.apache.ignite.igfs.IgfsFile): 13 usages
IgfsException (org.apache.ignite.igfs.IgfsException): 12 usages
IgniteUuid (org.apache.ignite.lang.IgniteUuid): 11 usages
IgfsBlockLocation (org.apache.ignite.igfs.IgfsBlockLocation): 10 usages
IgfsInputStream (org.apache.ignite.igfs.IgfsInputStream): 10 usages
Map (java.util.Map): 9 usages
Path (org.apache.hadoop.fs.Path): 9 usages
IgfsPathNotFoundException (org.apache.ignite.igfs.IgfsPathNotFoundException): 9 usages
FileNotFoundException (java.io.FileNotFoundException): 6 usages
OutputStream (java.io.OutputStream): 6 usages
IgfsDirectoryNotEmptyException (org.apache.ignite.igfs.IgfsDirectoryNotEmptyException): 6 usages
IgfsParentNotDirectoryException (org.apache.ignite.igfs.IgfsParentNotDirectoryException): 6 usages