
Example 41 with FileWriter

Use of java.io.FileWriter in project hadoop by apache.

From the class TestSysInfoLinux, method parsingProcMemFile.

/**
   * Test parsing /proc/meminfo
   * @throws IOException
   */
@Test
public void parsingProcMemFile() throws IOException {
    long memTotal = 4058864L;
    long memFree = 99632L;
    long inactive = 567732L;
    long swapTotal = 2096472L;
    long swapFree = 1818480L;
    int nrHugePages = 10;
    File tempFile = new File(FAKE_MEMFILE);
    tempFile.deleteOnExit();
    // Write a fake /proc/meminfo for the plugin to parse.
    FileWriter fWriter = new FileWriter(FAKE_MEMFILE);
    fWriter.write(String.format(MEMINFO_FORMAT, memTotal, memFree, inactive, swapTotal, swapFree, nrHugePages));
    fWriter.close();
    // The plugin reports bytes; /proc/meminfo values are in kB, hence the factor of 1024L.
    assertEquals(plugin.getAvailablePhysicalMemorySize(), 1024L * (memFree + inactive));
    assertEquals(plugin.getAvailableVirtualMemorySize(), 1024L * (memFree + inactive + swapFree));
    assertEquals(plugin.getPhysicalMemorySize(), 1024L * (memTotal - (nrHugePages * 2048)));
    assertEquals(plugin.getVirtualMemorySize(), 1024L * (memTotal - (nrHugePages * 2048) + swapTotal));
}
Also used: FileWriter (java.io.FileWriter), File (java.io.File), Test (org.junit.Test)
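
The open/write/close sequence above leaks the writer if write() throws. As a minimal sketch, not from the Hadoop source, here is the same write step with try-with-resources (FAKE_MEMFILE, MEMINFO_FORMAT, and the locals come from the test above):

try (FileWriter fWriter = new FileWriter(FAKE_MEMFILE)) {
    // The writer is closed automatically, even if write() fails.
    fWriter.write(String.format(MEMINFO_FORMAT, memTotal, memFree, inactive, swapTotal, swapFree, nrHugePages));
}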

Example 42 with FileWriter

Use of java.io.FileWriter in project hadoop by apache.

From the class TestSysInfoLinux, method parsingProcMemFile2.

/**
   * Test parsing /proc/meminfo with Inactive(file) present
   * @throws IOException
   */
@Test
public void parsingProcMemFile2() throws IOException {
    long memTotal = 131403836L;
    long memFree = 11257036L;
    long inactive = 27396032L;
    long inactiveFile = 21010696L;
    long swapTotal = 31981552L;
    long swapFree = 1818480L;
    long hardwareCorrupt = 31960904L;
    int nrHugePages = 10;
    File tempFile = new File(FAKE_MEMFILE);
    tempFile.deleteOnExit();
    FileWriter fWriter = new FileWriter(FAKE_MEMFILE);
    fWriter.write(String.format(MEMINFO_FORMAT_2, memTotal, memFree, inactive, inactiveFile, swapTotal, swapFree, hardwareCorrupt, nrHugePages));
    fWriter.close();
    // When Inactive(file) is present, it is used instead of Inactive.
    assertEquals(plugin.getAvailablePhysicalMemorySize(), 1024L * (memFree + inactiveFile));
    assertFalse(plugin.getAvailablePhysicalMemorySize() == 1024L * (memFree + inactive));
    assertEquals(plugin.getAvailableVirtualMemorySize(), 1024L * (memFree + inactiveFile + swapFree));
    assertEquals(plugin.getPhysicalMemorySize(), 1024L * (memTotal - hardwareCorrupt - (nrHugePages * 2048)));
    assertEquals(plugin.getVirtualMemorySize(), 1024L * (memTotal - hardwareCorrupt - (nrHugePages * 2048) + swapTotal));
}
Also used: FileWriter (java.io.FileWriter), File (java.io.File), Test (org.junit.Test)

Example 43 with FileWriter

Use of java.io.FileWriter in project hadoop by apache.

From the class TestHttpFSServer, method createHttpFSServer.

private void createHttpFSServer(boolean addDelegationTokenAuthHandler) throws Exception {
    File homeDir = TestDirHelper.getTestDir();
    Assert.assertTrue(new File(homeDir, "conf").mkdir());
    Assert.assertTrue(new File(homeDir, "log").mkdir());
    Assert.assertTrue(new File(homeDir, "temp").mkdir());
    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
    File secretFile = new File(new File(homeDir, "conf"), "secret");
    Writer w = new FileWriter(secretFile);
    w.write("secret");
    w.close();
    // HDFS configuration
    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
    hadoopConfDir.mkdirs();
    Configuration hdfsConf = TestHdfsHelper.getHdfsConf();
    // Http Server's conf should be based on HDFS's conf
    Configuration conf = new Configuration(hdfsConf);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.writeXml(os);
    os.close();
    // HTTPFS configuration
    conf = new Configuration(false);
    if (addDelegationTokenAuthHandler) {
        conf.set("httpfs.authentication.type", HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
    }
    conf.set("httpfs.services.ext", MockGroups.class.getName());
    conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.getHadoopUserGroups(HadoopUsersConfTestHelper.getHadoopUsers()[0])[0]);
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups", HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
    conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.writeXml(os);
    os.close();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL url = cl.getResource("webapp");
    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
    Server server = TestJettyHelper.getJettyServer();
    server.setHandler(context);
    server.start();
    if (addDelegationTokenAuthHandler) {
        HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
    }
}
Also used: WebAppContext (org.eclipse.jetty.webapp.WebAppContext), Configuration (org.apache.hadoop.conf.Configuration), Server (org.eclipse.jetty.server.Server), FileWriter (java.io.FileWriter), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), File (java.io.File), Writer (java.io.Writer), URL (java.net.URL), AuthenticatedURL (org.apache.hadoop.security.authentication.client.AuthenticatedURL)
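
The FileOutputStream + conf.writeXml pattern appears twice above; a minimal helper sketch using the same Hadoop Configuration API (the writeConf name is hypothetical, not part of the test):

// A minimal sketch, assuming org.apache.hadoop.conf.Configuration.writeXml(OutputStream):
// serialize a Configuration to an XML file, closing the stream automatically.
private static void writeConf(Configuration conf, File target) throws IOException {
    try (OutputStream os = new FileOutputStream(target)) {
        conf.writeXml(os);
    }
}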

Example 44 with FileWriter

Use of java.io.FileWriter in project hadoop by apache.

From the class TestNativeIO, method testSetFilePointer.

@Test(timeout = 30000)
public void testSetFilePointer() throws Exception {
    assumeWindows();
    LOG.info("Set a file pointer on Windows");
    try {
        File testfile = new File(TEST_DIR, "testSetFilePointer");
        assertTrue("Create test subject", testfile.exists() || testfile.createNewFile());
        FileWriter writer = new FileWriter(testfile);
        try {
            // Write 100 'a' bytes followed by 100 'b' bytes.
            for (int i = 0; i < 200; i++) {
                if (i < 100) {
                    writer.write('a');
                } else {
                    writer.write('b');
                }
            }
            writer.flush();
        } catch (Exception writerException) {
            fail("Got unexpected exception: " + writerException.getMessage());
        } finally {
            writer.close();
        }
        FileDescriptor fd = NativeIO.Windows.createFile(testfile.getCanonicalPath(), NativeIO.Windows.GENERIC_READ, NativeIO.Windows.FILE_SHARE_READ | NativeIO.Windows.FILE_SHARE_WRITE | NativeIO.Windows.FILE_SHARE_DELETE, NativeIO.Windows.OPEN_EXISTING);
        NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
        FileReader reader = new FileReader(fd);
        try {
            int c = reader.read();
            assertTrue("Unexpected character: " + c, c == 'b');
        } catch (Exception readerException) {
            fail("Got unexpected exception: " + readerException.getMessage());
        } finally {
            reader.close();
        }
    } catch (Exception e) {
        fail("Got unexpected exception: " + e.getMessage());
    }
}
Also used: FileWriter (java.io.FileWriter), FileReader (java.io.FileReader), RandomAccessFile (java.io.RandomAccessFile), File (java.io.File), IOException (java.io.IOException), FileDescriptor (java.io.FileDescriptor), Test (org.junit.Test)
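
For comparison, the portable JDK counterpart of the Windows-only NativeIO.Windows.setFilePointer call is RandomAccessFile.seek. A minimal sketch, not from the Hadoop test, reading the same testfile written above:

try (RandomAccessFile raf = new RandomAccessFile(testfile, "r")) {
    // Move the file pointer to byte offset 120; offsets 100..199 hold 'b'.
    raf.seek(120);
    int c = raf.read();
    assertTrue("Unexpected character: " + c, c == 'b');
}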

Example 45 with FileWriter

Use of java.io.FileWriter in project hadoop by apache.

From the class TestLoadGenerator, method testLoadGenerator.

/** Test that the load generator works correctly. */
@Test
public void testLoadGenerator() throws Exception {
    final String TEST_SPACE_ROOT = "/test";
    final String SCRIPT_TEST_DIR = OUT_DIR.getAbsolutePath();
    String script = SCRIPT_TEST_DIR + "/" + "loadgenscript";
    String script2 = SCRIPT_TEST_DIR + "/" + "loadgenscript2";
    File scriptFile1 = new File(script);
    File scriptFile2 = new File(script2);
    FileWriter writer = new FileWriter(DIR_STRUCTURE_FILE);
    writer.write(DIR_STRUCTURE_FIRST_LINE + "\n");
    writer.write(DIR_STRUCTURE_SECOND_LINE + "\n");
    writer.close();
    writer = new FileWriter(FILE_STRUCTURE_FILE);
    writer.write(FILE_STRUCTURE_FIRST_LINE + "\n");
    writer.write(FILE_STRUCTURE_SECOND_LINE + "\n");
    writer.close();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(3).build();
    cluster.waitActive();
    try {
        DataGenerator dg = new DataGenerator();
        dg.setConf(CONF);
        String[] args = new String[] { "-inDir", OUT_DIR.getAbsolutePath(), "-root", TEST_SPACE_ROOT };
        assertEquals(0, dg.run(args));
        final int READ_PROBABILITY = 1;
        final int WRITE_PROBABILITY = 3;
        final int MAX_DELAY_BETWEEN_OPS = 7;
        final int NUM_OF_THREADS = 9;
        final int START_TIME = 11;
        final int ELAPSED_TIME = 13;
        LoadGenerator lg = new LoadGenerator();
        lg.setConf(CONF);
        args = new String[] { "-readProbability", "0.3", "-writeProbability", "0.3", "-root", TEST_SPACE_ROOT, "-maxDelayBetweenOps", "0", "-numOfThreads", "1", "-startTime", Long.toString(Time.now()), "-elapsedTime", "10" };
        assertEquals(0, lg.run(args));
        // Out-of-range probability values and malformed numbers must make run() return -1.
        String oldArg = args[READ_PROBABILITY];
        args[READ_PROBABILITY] = "1.1";
        assertEquals(-1, lg.run(args));
        args[READ_PROBABILITY] = "-1.1";
        assertEquals(-1, lg.run(args));
        args[READ_PROBABILITY] = oldArg;
        oldArg = args[WRITE_PROBABILITY];
        args[WRITE_PROBABILITY] = "1.1";
        assertEquals(-1, lg.run(args));
        args[WRITE_PROBABILITY] = "-1.1";
        assertEquals(-1, lg.run(args));
        args[WRITE_PROBABILITY] = "0.9";
        assertEquals(-1, lg.run(args));
        args[READ_PROBABILITY] = oldArg;
        oldArg = args[MAX_DELAY_BETWEEN_OPS];
        args[MAX_DELAY_BETWEEN_OPS] = "1.x1";
        assertEquals(-1, lg.run(args));
        args[MAX_DELAY_BETWEEN_OPS] = oldArg;
        oldArg = args[NUM_OF_THREADS];
        args[NUM_OF_THREADS] = "-1";
        assertEquals(-1, lg.run(args));
        args[NUM_OF_THREADS] = oldArg;
        oldArg = args[START_TIME];
        args[START_TIME] = "-1";
        assertEquals(-1, lg.run(args));
        args[START_TIME] = oldArg;
        oldArg = args[ELAPSED_TIME];
        args[ELAPSED_TIME] = "-1";
        assertEquals(-1, lg.run(args));
        args[ELAPSED_TIME] = oldArg;
        // Test scripted operation, first with a good script.
        FileWriter fw = new FileWriter(scriptFile1);
        fw.write("2 .22 .33\n");
        fw.write("3 .10 .6\n");
        fw.write("6 0 .7\n");
        fw.close();
        String[] scriptArgs = new String[] { "-root", TEST_SPACE_ROOT, "-maxDelayBetweenOps", "0", "-numOfThreads", "10", "-startTime", Long.toString(Time.now()), "-scriptFile", script };
        assertEquals(0, lg.run(scriptArgs));
        // Test with bad script
        fw = new FileWriter(scriptFile2);
        fw.write("2 .22 .33\n");
        fw.write("3 blah blah blah .6\n");
        fw.write("6 0 .7\n");
        fw.close();
        scriptArgs[scriptArgs.length - 1] = script2;
        assertEquals(-1, lg.run(scriptArgs));
    } finally {
        cluster.shutdown();
        DIR_STRUCTURE_FILE.delete();
        FILE_STRUCTURE_FILE.delete();
        scriptFile1.delete();
        scriptFile2.delete();
    }
}
Also used: MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster), FileWriter (java.io.FileWriter), File (java.io.File), Test (org.junit.Test)

Aggregations

FileWriter (java.io.FileWriter): 1776
File (java.io.File): 1056
IOException (java.io.IOException): 741
BufferedWriter (java.io.BufferedWriter): 705
PrintWriter (java.io.PrintWriter): 285
Test (org.junit.Test): 222
Writer (java.io.Writer): 162
FileReader (java.io.FileReader): 121
BufferedReader (java.io.BufferedReader): 107
ArrayList (java.util.ArrayList): 101
FileNotFoundException (java.io.FileNotFoundException): 68
Date (java.util.Date): 61
Properties (java.util.Properties): 61
FileOutputStream (java.io.FileOutputStream): 58
HashMap (java.util.HashMap): 53
StringWriter (java.io.StringWriter): 50
Path (org.apache.hadoop.fs.Path): 49
FileInputStream (java.io.FileInputStream): 47
Map (java.util.Map): 33
OutputStreamWriter (java.io.OutputStreamWriter): 31
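
The counts above show FileWriter most often wrapped in a BufferedWriter. A minimal sketch of that common pairing (the file name is hypothetical):

// Buffer writes to reduce system calls; try-with-resources closes both writers.
try (BufferedWriter out = new BufferedWriter(new FileWriter("example.txt"))) {
    out.write("hello");
    out.newLine();
}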