
Example 71 with Date

Use of java.util.Date in project hadoop by apache.

The class RollingFileSystemSinkTestBase, method readLogFile.

/**
   * Read the log files at the target path and return the contents as a single
   * string. This method will assert that the correct number of files is found.
   *
   * @param path the target path
   * @param then when the test method began. Used to find the log directory in
   * the case that the test run crosses the top of the hour.
   * @param count the number of log files to expect
   * @return the contents of the matching log files as a single string
   * @throws IOException if the log files cannot be read
   * @throws URISyntaxException if the path is not a valid URI
   */
protected String readLogFile(String path, String then, int count) throws IOException, URISyntaxException {
    final String now = DATE_FORMAT.format(new Date()) + "00";
    final String logFile = getLogFilename();
    FileSystem fs = FileSystem.get(new URI(path), new Configuration());
    StringBuilder metrics = new StringBuilder();
    boolean found = false;
    for (FileStatus status : fs.listStatus(new Path(path))) {
        Path logDir = status.getPath();
        // There are only two possible valid log directory names: the time when
        // the test started and the current time.  Anything else can be ignored.
        if (now.equals(logDir.getName()) || then.equals(logDir.getName())) {
            readLogData(fs, findMostRecentLogFile(fs, new Path(logDir, logFile)), metrics);
            assertFileCount(fs, logDir, count);
            found = true;
        }
    }
    assertTrue("No valid log directories found", found);
    return metrics.toString();
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) LocatedFileStatus(org.apache.hadoop.fs.LocatedFileStatus) Configuration(org.apache.hadoop.conf.Configuration) SubsetConfiguration(org.apache.commons.configuration2.SubsetConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) URI(java.net.URI) Date(java.util.Date)
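
For context, a hedged sketch of how a test might call readLogFile(). The test method name, the target path, and the surrounding sink setup are hypothetical; the construction of the then argument simply mirrors the now computation inside readLogFile() above, and JUnit's @Test and assertTrue are assumed.

// Hypothetical test method in a subclass of RollingFileSystemSinkTestBase.
@Test
public void sketchMetricsAreLogged() throws Exception {
    // Record the hourly directory name at the start of the test so readLogFile()
    // still matches if the run crosses the top of the hour.
    String then = DATE_FORMAT.format(new Date()) + "00";
    String path = "target/RollingFileSystemSinkTest";  // hypothetical local path

    // ... configure the sink and publish some metrics here ...

    // Expect exactly one log file under the matching hourly directory.
    String metrics = readLogFile(path, then, 1);
    assertTrue("No metrics were written", metrics.length() > 0);
}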

Example 72 with Date

Use of java.util.Date in project hadoop by apache.

The class RollingFileSystemSinkTestBase, method getNowNotTopOfHour.

/**
   * Return a calendar based on the current time.  If the current time is very
   * near the top of the hour (less than 20 seconds), sleep until the new hour
   * before returning a new Calendar instance.
   *
   * @return a new Calendar instance that isn't near the top of the hour
   * @throws InterruptedException if interrupted while sleeping
   */
public Calendar getNowNotTopOfHour() throws InterruptedException {
    Calendar now = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
    // If we're at the very end of the hour, sleep into the next hour
    // so that we don't get confused by the directory rolling
    if ((now.get(Calendar.MINUTE) == 59) && (now.get(Calendar.SECOND) > 40)) {
        Thread.sleep((61 - now.get(Calendar.SECOND)) * 1000L);
        now.setTime(new Date());
    }
    return now;
}
Also used : Calendar(java.util.Calendar) Date(java.util.Date)
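
A hedged sketch of how the returned Calendar is typically turned into the then argument of readLogFile() above. The helper name and the "yyyyMMddHH" pattern are assumptions for illustration (the real DATE_FORMAT field lives elsewhere in RollingFileSystemSinkTestBase), and java.text.SimpleDateFormat plus java.util.TimeZone are assumed to be imported.

// Hypothetical helper; the "yyyyMMddHH" pattern is an assumption standing in
// for the DATE_FORMAT field used by readLogFile().
protected String hourlyDirNameFor(Calendar start) {
    SimpleDateFormat hourly = new SimpleDateFormat("yyyyMMddHH");
    hourly.setTimeZone(TimeZone.getTimeZone("GMT"));
    // e.g. "2017031514" + "00" -- the name of the hourly log directory
    return hourly.format(start.getTime()) + "00";
}

// Usage: Calendar nowSafe = getNowNotTopOfHour();
//        String then = hourlyDirNameFor(nowSafe);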

Example 73 with Date

Use of java.util.Date in project hadoop by apache.

The class TestSetTimes, method testTimesAtClose.

/**
   * Tests mod time change at close in DFS.
   */
@Test
public void testTimesAtClose() throws IOException {
    Configuration conf = new HdfsConfiguration();
    // 2s
    final int MAX_IDLE_TIME = 2000;
    int replicas = 1;
    // parameter initialization
    conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    conf.setInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY, 50);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes).build();
    cluster.waitActive();
    InetSocketAddress addr = new InetSocketAddress("localhost", cluster.getNameNodePort());
    DFSClient client = new DFSClient(addr, conf);
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
    assertEquals("Number of Datanodes ", numDatanodes, info.length);
    FileSystem fileSys = cluster.getFileSystem();
    assertTrue(fileSys instanceof DistributedFileSystem);
    try {
        // create a new file and write to it
        Path file1 = new Path("/simple.dat");
        FSDataOutputStream stm = writeFile(fileSys, file1, replicas);
        System.out.println("Created and wrote file simple.dat");
        FileStatus statBeforeClose = fileSys.getFileStatus(file1);
        long mtimeBeforeClose = statBeforeClose.getModificationTime();
        String mdateBeforeClose = dateForm.format(new Date(mtimeBeforeClose));
        System.out.println("mtime on " + file1 + " before close is " + mdateBeforeClose + " (" + mtimeBeforeClose + ")");
        assertTrue(mtimeBeforeClose != 0);
        //close file after writing
        stm.close();
        System.out.println("Closed file.");
        FileStatus statAfterClose = fileSys.getFileStatus(file1);
        long mtimeAfterClose = statAfterClose.getModificationTime();
        String mdateAfterClose = dateForm.format(new Date(mtimeAfterClose));
        System.out.println("mtime on " + file1 + " after close is " + mdateAfterClose + " (" + mtimeAfterClose + ")");
        assertTrue(mtimeAfterClose != 0);
        assertTrue(mtimeBeforeClose != mtimeAfterClose);
        cleanupFile(fileSys, file1);
    } catch (IOException e) {
        info = client.datanodeReport(DatanodeReportType.ALL);
        printDatanodeReport(info);
        throw e;
    } finally {
        fileSys.close();
        cluster.shutdown();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) InetSocketAddress(java.net.InetSocketAddress) IOException(java.io.IOException) Date(java.util.Date) FileSystem(org.apache.hadoop.fs.FileSystem) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) Test(org.junit.Test)
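
The dateForm formatter used for the printouts above is defined elsewhere in TestSetTimes and is not shown in this excerpt; the pattern below is an assumption, included only to make the conversion self-contained and runnable.

import java.text.SimpleDateFormat;
import java.util.Date;

public class DateFormSketch {
    // Assumed pattern; the real dateForm field lives in TestSetTimes.
    static final SimpleDateFormat dateForm = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    public static void main(String[] args) {
        long mtime = System.currentTimeMillis();
        // Same conversion the test applies to getModificationTime() values
        System.out.println("mtime is " + dateForm.format(new Date(mtime)) + " (" + mtime + ")");
    }
}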

Example 74 with Date

Use of java.util.Date in project hadoop by apache.

The class TestSetTimes, method testTimes.

/**
   * Tests mod & access time in DFS.
   */
@Test
public void testTimes() throws IOException {
    Configuration conf = new HdfsConfiguration();
    // 2s
    final int MAX_IDLE_TIME = 2000;
    conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes).build();
    cluster.waitActive();
    final int nnport = cluster.getNameNodePort();
    InetSocketAddress addr = new InetSocketAddress("localhost", cluster.getNameNodePort());
    DFSClient client = new DFSClient(addr, conf);
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
    assertEquals("Number of Datanodes ", numDatanodes, info.length);
    FileSystem fileSys = cluster.getFileSystem();
    int replicas = 1;
    assertTrue(fileSys instanceof DistributedFileSystem);
    try {
        //
        // create file and record atime/mtime
        //
        System.out.println("Creating testdir1 and testdir1/test1.dat.");
        Path dir1 = new Path("testdir1");
        Path file1 = new Path(dir1, "test1.dat");
        FSDataOutputStream stm = writeFile(fileSys, file1, replicas);
        FileStatus stat = fileSys.getFileStatus(file1);
        long atimeBeforeClose = stat.getAccessTime();
        String adate = dateForm.format(new Date(atimeBeforeClose));
        System.out.println("atime on " + file1 + " before close is " + adate + " (" + atimeBeforeClose + ")");
        assertTrue(atimeBeforeClose != 0);
        stm.close();
        stat = fileSys.getFileStatus(file1);
        long atime1 = stat.getAccessTime();
        long mtime1 = stat.getModificationTime();
        adate = dateForm.format(new Date(atime1));
        String mdate = dateForm.format(new Date(mtime1));
        System.out.println("atime on " + file1 + " is " + adate + " (" + atime1 + ")");
        System.out.println("mtime on " + file1 + " is " + mdate + " (" + mtime1 + ")");
        assertTrue(atime1 != 0);
        //
        // record dir times
        //
        stat = fileSys.getFileStatus(dir1);
        long mdir1 = stat.getAccessTime();
        assertTrue(mdir1 == 0);
        // set the access time to be one day in the past
        long atime2 = atime1 - (24L * 3600L * 1000L);
        fileSys.setTimes(file1, -1, atime2);
        // check new access time on file
        stat = fileSys.getFileStatus(file1);
        long atime3 = stat.getAccessTime();
        String adate3 = dateForm.format(new Date(atime3));
        System.out.println("new atime on " + file1 + " is " + adate3 + " (" + atime3 + ")");
        assertTrue(atime2 == atime3);
        assertTrue(mtime1 == stat.getModificationTime());
        // set the modification time to be 1 hour in the past
        long mtime2 = mtime1 - (3600L * 1000L);
        fileSys.setTimes(file1, mtime2, -1);
        // check new modification time on file
        stat = fileSys.getFileStatus(file1);
        long mtime3 = stat.getModificationTime();
        String mdate3 = dateForm.format(new Date(mtime3));
        System.out.println("new mtime on " + file1 + " is " + mdate3 + " (" + mtime3 + ")");
        assertTrue(atime2 == stat.getAccessTime());
        assertTrue(mtime2 == mtime3);
        long mtime4 = Time.now() - (3600L * 1000L);
        long atime4 = Time.now();
        fileSys.setTimes(dir1, mtime4, atime4);
        // check new modification time on file
        stat = fileSys.getFileStatus(dir1);
        assertTrue("Not matching the modification times", mtime4 == stat.getModificationTime());
        assertTrue("Not matching the access times", atime4 == stat.getAccessTime());
        Path nonExistingDir = new Path(dir1, "/nonExistingDir/");
        try {
            fileSys.setTimes(nonExistingDir, mtime4, atime4);
            fail("Expecting FileNotFoundException");
        } catch (FileNotFoundException e) {
            assertTrue(e.getMessage().contains("File/Directory " + nonExistingDir.toString() + " does not exist."));
        }
        // shutdown cluster and restart
        cluster.shutdown();
        try {
            Thread.sleep(2 * MAX_IDLE_TIME);
        } catch (InterruptedException e) {
        }
        cluster = new MiniDFSCluster.Builder(conf).nameNodePort(nnport).format(false).build();
        cluster.waitActive();
        fileSys = cluster.getFileSystem();
        // verify that access times and modification times persist after a
        // cluster restart.
        System.out.println("Verifying times after cluster restart");
        stat = fileSys.getFileStatus(file1);
        assertTrue(atime2 == stat.getAccessTime());
        assertTrue(mtime3 == stat.getModificationTime());
        cleanupFile(fileSys, file1);
        cleanupFile(fileSys, dir1);
    } catch (IOException e) {
        info = client.datanodeReport(DatanodeReportType.ALL);
        printDatanodeReport(info);
        throw e;
    } finally {
        fileSys.close();
        cluster.shutdown();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) InetSocketAddress(java.net.InetSocketAddress) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) Date(java.util.Date) FileSystem(org.apache.hadoop.fs.FileSystem) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) Test(org.junit.Test)
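
The key API exercised above is FileSystem#setTimes(Path, mtime, atime), where passing -1 leaves the corresponding timestamp unchanged. A minimal standalone sketch, assuming a file system reachable from the default Configuration and a pre-existing file at a hypothetical path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SetTimesSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path file = new Path("/tmp/sketch.dat");  // hypothetical path, must already exist
        FileStatus before = fs.getFileStatus(file);

        // Move the access time back one day; -1 leaves the modification time alone.
        long newAtime = before.getAccessTime() - 24L * 3600L * 1000L;
        fs.setTimes(file, -1, newAtime);

        FileStatus after = fs.getFileStatus(file);
        System.out.println("mtime unchanged: "
            + (before.getModificationTime() == after.getModificationTime()));
        System.out.println("atime updated:   " + (after.getAccessTime() == newAtime));
    }
}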

Example 75 with Date

Use of java.util.Date in project hadoop by apache.

The class FSNamesystem, method getTopUserOpCounts.

// FSNamesystemMBean
@Override
public String getTopUserOpCounts() {
    if (!topConf.isEnabled) {
        return null;
    }
    Date now = new Date();
    final List<RollingWindowManager.TopWindow> topWindows = topMetrics.getTopWindows();
    Map<String, Object> topMap = new TreeMap<String, Object>();
    topMap.put("windows", topWindows);
    topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
    try {
        return JsonUtil.toJsonString(topMap);
    } catch (IOException e) {
        LOG.warn("Failed to fetch TopUser metrics", e);
    }
    return null;
}
Also used : IOException(java.io.IOException) TreeMap(java.util.TreeMap) Date(java.util.Date)
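
The "timestamp" entry written into the JSON above is an ISO-8601 string produced by DFSUtil.dateToIso8601String(). Its exact implementation is not shown here; the following is a hedged, roughly equivalent sketch with an assumed format pattern.

import java.text.SimpleDateFormat;
import java.util.Date;

public class Iso8601Sketch {
    // Assumed pattern, for illustration only; see DFSUtil for the real formatter.
    static String toIso8601(Date date) {
        return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").format(date);
    }

    public static void main(String[] args) {
        System.out.println(toIso8601(new Date()));  // e.g. 2017-03-15T14:02:31-0700
    }
}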

Aggregations

Date (java.util.Date): 11526
Test (org.junit.Test): 2903
SimpleDateFormat (java.text.SimpleDateFormat): 1601
ArrayList (java.util.ArrayList): 1066
Calendar (java.util.Calendar): 809
HashMap (java.util.HashMap): 615
IOException (java.io.IOException): 606
File (java.io.File): 577
ParseException (java.text.ParseException): 525
GregorianCalendar (java.util.GregorianCalendar): 425
List (java.util.List): 336
DateFormat (java.text.DateFormat): 313
Map (java.util.Map): 296
DateTime (org.joda.time.DateTime): 239
Test (org.testng.annotations.Test): 210
HashSet (java.util.HashSet): 190
SQLException (java.sql.SQLException): 167
LocalDate (org.joda.time.LocalDate): 155
BigDecimal (java.math.BigDecimal): 148
JSONObject (org.json.JSONObject): 148