Search in sources:

Example 91 with Date

use of java.util.Date in project hadoop by apache.

From the class VersionInfoMojo, the method getBuildTime.

/**
 * Returns a string representing current build time.
 *
 * @return String representing current build time
 */
private String getBuildTime() {
    // ISO-8601-style timestamp at minute precision, always rendered in UTC
    // so the build time is stable regardless of the build host's time zone.
    DateFormat utcFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
    utcFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    Date now = new Date();
    return utcFormat.format(now);
}
Also used : SimpleDateFormat(java.text.SimpleDateFormat) DateFormat(java.text.DateFormat) SimpleDateFormat(java.text.SimpleDateFormat) Date(java.util.Date)

Example 92 with Date

use of java.util.Date in project hadoop by apache.

From the class Util, the method createWriter.

/**
 * Create a writer of a local file. The file name is {@code prefix} plus a
 * millisecond-resolution timestamp plus {@code .txt}; if a file with that
 * name already exists, the method sleeps briefly and retries with a fresh
 * timestamp until an unused name is found.
 *
 * @param dir directory in which the file is created (validated by checkDirectory)
 * @param prefix file-name prefix for the new file
 * @return a UTF-8 PrintWriter over the newly created file
 * @throws IOException if the directory check or file creation fails
 */
public static PrintWriter createWriter(File dir, String prefix) throws IOException {
    checkDirectory(dir);
    SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
    for (; ; ) {
        final File f = new File(dir, prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
        if (!f.exists())
            return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
        try {
            // Name collision: wait for the millisecond clock to advance, then retry.
            Thread.sleep(10);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it,
            // so callers higher up the stack can still observe the interrupt.
            Thread.currentThread().interrupt();
        }
    }
}
Also used : FileOutputStream(java.io.FileOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) SimpleDateFormat(java.text.SimpleDateFormat) File(java.io.File) Date(java.util.Date) PrintWriter(java.io.PrintWriter)

Example 93 with Date

use of java.util.Date in project hadoop by apache.

From the class TestAzureFileSystemInstrumentation, the method testMetricsOnFileCreateRead.

/**
 * Creates a 1000-byte file, reads it back, and verifies the WASB
 * instrumentation gauges (web responses, bytes written/read, upload and
 * download rate/latency) fall within loose, end-to-end-derived bounds.
 * Bandwidth auto-update is suppressed so the gauges are sampled exactly
 * when triggerUpdate() is called.
 */
@Test
public void testMetricsOnFileCreateRead() throws Exception {
    long base = getBaseWebResponses();
    assertEquals(0, AzureMetricsTestUtil.getCurrentBytesWritten(getInstrumentation()));
    Path filePath = new Path("/metricsTest_webResponses");
    final int FILE_SIZE = 1000;
    // Suppress auto-update of bandwidth metrics so we get
    // to update them exactly when we want to.
    getBandwidthGaugeUpdater().suppressAutoUpdate();
    // Create a file
    Date start = new Date();
    OutputStream outputStream = fs.create(filePath);
    outputStream.write(nonZeroByteArray(FILE_SIZE));
    outputStream.close();
    long uploadDurationMs = new Date().getTime() - start.getTime();
    // The exact number of requests/responses that happen to create a file
    // can vary - at the time of writing this code it takes 10
    // requests/responses for the 1000 byte file (33 for 100 MB),
    // plus the initial container-check request but that
    // can very easily change in the future. Just assert that we do roughly
    // more than 2 but less than 15.
    logOpResponseCount("Creating a 1K file", base);
    base = assertWebResponsesInRange(base, 2, 15);
    getBandwidthGaugeUpdater().triggerUpdate(true);
    long bytesWritten = AzureMetricsTestUtil.getCurrentBytesWritten(getInstrumentation());
    assertTrue("The bytes written in the last second " + bytesWritten + " is pretty far from the expected range of around " + FILE_SIZE + " bytes plus a little overhead.", bytesWritten > (FILE_SIZE / 2) && bytesWritten < (FILE_SIZE * 2));
    long totalBytesWritten = AzureMetricsTestUtil.getCurrentTotalBytesWritten(getInstrumentation());
    assertTrue("The total bytes written  " + totalBytesWritten + " is pretty far from the expected range of around " + FILE_SIZE + " bytes plus a little overhead.", totalBytesWritten >= FILE_SIZE && totalBytesWritten < (FILE_SIZE * 2));
    long uploadRate = AzureMetricsTestUtil.getLongGaugeValue(getInstrumentation(), WASB_UPLOAD_RATE);
    System.out.println("Upload rate: " + uploadRate + " bytes/second.");
    // Clamp the divisor to at least 1 ms: on a fast machine (or with coarse
    // clock granularity) the measured duration can be 0, which previously
    // caused an ArithmeticException (divide by zero) here.
    long expectedRate = (FILE_SIZE * 1000L) / Math.max(1, uploadDurationMs);
    assertTrue("The upload rate " + uploadRate + " is below the expected range of around " + expectedRate + " bytes/second that the unit test observed. This should never be" + " the case since the test underestimates the rate by looking at " + " end-to-end time instead of just block upload time.", uploadRate >= expectedRate);
    long uploadLatency = AzureMetricsTestUtil.getLongGaugeValue(getInstrumentation(), WASB_UPLOAD_LATENCY);
    System.out.println("Upload latency: " + uploadLatency);
    // We're uploading less than a block.
    long expectedLatency = uploadDurationMs;
    assertTrue("The upload latency " + uploadLatency + " should be greater than zero now that I've just uploaded a file.", uploadLatency > 0);
    assertTrue("The upload latency " + uploadLatency + " is more than the expected range of around " + expectedLatency + " milliseconds that the unit test observed. This should never be" + " the case since the test overestimates the latency by looking at " + " end-to-end time instead of just block upload time.", uploadLatency <= expectedLatency);
    // Read the file
    start = new Date();
    InputStream inputStream = fs.open(filePath);
    int count = 0;
    while (inputStream.read() >= 0) {
        count++;
    }
    inputStream.close();
    long downloadDurationMs = new Date().getTime() - start.getTime();
    assertEquals(FILE_SIZE, count);
    // Again, exact number varies. At the time of writing this code
    // it takes 4 request/responses, so just assert a rough range between
    // 1 and 10.
    logOpResponseCount("Reading a 1K file", base);
    base = assertWebResponsesInRange(base, 1, 10);
    getBandwidthGaugeUpdater().triggerUpdate(false);
    long totalBytesRead = AzureMetricsTestUtil.getCurrentTotalBytesRead(getInstrumentation());
    assertEquals(FILE_SIZE, totalBytesRead);
    long bytesRead = AzureMetricsTestUtil.getCurrentBytesRead(getInstrumentation());
    assertTrue("The bytes read in the last second " + bytesRead + " is pretty far from the expected range of around " + FILE_SIZE + " bytes plus a little overhead.", bytesRead > (FILE_SIZE / 2) && bytesRead < (FILE_SIZE * 2));
    long downloadRate = AzureMetricsTestUtil.getLongGaugeValue(getInstrumentation(), WASB_DOWNLOAD_RATE);
    System.out.println("Download rate: " + downloadRate + " bytes/second.");
    // Same divide-by-zero guard as on the upload path above.
    expectedRate = (FILE_SIZE * 1000L) / Math.max(1, downloadDurationMs);
    assertTrue("The download rate " + downloadRate + " is below the expected range of around " + expectedRate + " bytes/second that the unit test observed. This should never be" + " the case since the test underestimates the rate by looking at " + " end-to-end time instead of just block download time.", downloadRate >= expectedRate);
    long downloadLatency = AzureMetricsTestUtil.getLongGaugeValue(getInstrumentation(), WASB_DOWNLOAD_LATENCY);
    System.out.println("Download latency: " + downloadLatency);
    // We're downloading less than a block.
    expectedLatency = downloadDurationMs;
    assertTrue("The download latency " + downloadLatency + " should be greater than zero now that I've just downloaded a file.", downloadLatency > 0);
    assertTrue("The download latency " + downloadLatency + " is more than the expected range of around " + expectedLatency + " milliseconds that the unit test observed. This should never be" + " the case since the test overestimates the latency by looking at " + " end-to-end time instead of just block download time.", downloadLatency <= expectedLatency);
    assertNoErrors();
}
Also used : Path(org.apache.hadoop.fs.Path) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) Date(java.util.Date) Test(org.junit.Test)

Example 94 with Date

use of java.util.Date in project hadoop by apache.

From the class RandomTextWriterJob, the method run.

/**
 * This is the main routine for launching a distributed random write job.
 * It runs 10 maps/node and each node writes 1 gig of data to a DFS file.
 * The reduce doesn't do anything.
 *
 * @throws IOException
 */
public int run(String[] args) throws Exception {
    if (args.length == 0) {
        // No output directory supplied; nothing to do but print usage.
        return printUsage();
    }
    Job job = createJob(getConf());
    FileOutputFormat.setOutputPath(job, new Path(args[0]));
    Date launchTime = new Date();
    System.out.println("Job started: " + launchTime);
    boolean succeeded = job.waitForCompletion(true);
    int exitCode = succeeded ? 0 : 1;
    Date finishTime = new Date();
    System.out.println("Job ended: " + finishTime);
    long elapsedSeconds = (finishTime.getTime() - launchTime.getTime()) / 1000;
    System.out.println("The job took " + elapsedSeconds + " seconds.");
    return exitCode;
}
Also used : Path(org.apache.hadoop.fs.Path) Job(org.apache.hadoop.mapreduce.Job) Date(java.util.Date)

Example 95 with Date

use of java.util.Date in project hadoop by apache.

From the class DFSCIOTest, the method analyzeResult.

/**
 * Reads the reducer output of a DFSCIOTest run, computes aggregate I/O
 * statistics (throughput, mean rate, standard deviation), logs them, and
 * appends them to the result file.
 *
 * @param fs file system holding the reduce output
 * @param testType TEST_TYPE_WRITE or TEST_TYPE_READ; selects which output dir to read
 * @param execTime total test execution time in milliseconds
 * @param resFileName local file to append the formatted results to
 * @throws IOException if reading the reduce output or writing the result file fails
 */
private static void analyzeResult(FileSystem fs, int testType, long execTime, String resFileName) throws IOException {
    Path reduceFile;
    if (testType == TEST_TYPE_WRITE)
        reduceFile = new Path(WRITE_DIR, "part-00000");
    else
        reduceFile = new Path(READ_DIR, "part-00000");
    long tasks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    float sqrate = 0;
    // Parse the aggregated counters emitted by the reducer. try-with-resources
    // closes the reader and the underlying FS stream (the original leaked both).
    try (DataInputStream in = new DataInputStream(fs.open(reduceFile));
         BufferedReader lines = new BufferedReader(new InputStreamReader(in))) {
        String line;
        while ((line = lines.readLine()) != null) {
            StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
            String attr = tokens.nextToken();
            if (attr.endsWith(":tasks"))
                tasks = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":size"))
                size = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":time"))
                time = Long.parseLong(tokens.nextToken());
            else if (attr.endsWith(":rate"))
                rate = Float.parseFloat(tokens.nextToken());
            else if (attr.endsWith(":sqrate"))
                sqrate = Float.parseFloat(tokens.nextToken());
        }
    }
    double med = rate / 1000 / tasks;
    double stdDev = Math.sqrt(Math.abs(sqrate / 1000 / tasks - med * med));
    String[] resultLines = { "----- DFSCIOTest ----- : " + ((testType == TEST_TYPE_WRITE) ? "write" : (testType == TEST_TYPE_READ) ? "read" : "unknown"), "           Date & time: " + new Date(System.currentTimeMillis()), "       Number of files: " + tasks, "Total MBytes processed: " + size / MEGA, "     Throughput mb/sec: " + size * 1000.0 / (time * MEGA), "Average IO rate mb/sec: " + med, " Std IO rate deviation: " + stdDev, "    Test exec time sec: " + (float) execTime / 1000, "" };
    // try-with-resources guarantees the result file is flushed and closed;
    // the original never closed the PrintStream, risking lost buffered output.
    try (PrintStream res = new PrintStream(new FileOutputStream(new File(resFileName), true))) {
        for (int i = 0; i < resultLines.length; i++) {
            LOG.info(resultLines[i]);
            res.println(resultLines[i]);
        }
    }
}
Also used : PrintStream(java.io.PrintStream) InputStreamReader(java.io.InputStreamReader) DataInputStream(java.io.DataInputStream) Date(java.util.Date) StringTokenizer(java.util.StringTokenizer) FileOutputStream(java.io.FileOutputStream) BufferedReader(java.io.BufferedReader) SequenceFile(org.apache.hadoop.io.SequenceFile) File(java.io.File)

Aggregations

Date (java.util.Date)11526 Test (org.junit.Test)2903 SimpleDateFormat (java.text.SimpleDateFormat)1601 ArrayList (java.util.ArrayList)1066 Calendar (java.util.Calendar)809 HashMap (java.util.HashMap)615 IOException (java.io.IOException)606 File (java.io.File)577 ParseException (java.text.ParseException)525 GregorianCalendar (java.util.GregorianCalendar)425 List (java.util.List)336 DateFormat (java.text.DateFormat)313 Map (java.util.Map)296 DateTime (org.joda.time.DateTime)239 Test (org.testng.annotations.Test)210 HashSet (java.util.HashSet)190 SQLException (java.sql.SQLException)167 LocalDate (org.joda.time.LocalDate)155 BigDecimal (java.math.BigDecimal)148 JSONObject (org.json.JSONObject)148