Search in sources:

Example 46 with BufferedOutputStream

use of java.io.BufferedOutputStream in project j2objc by google.

the class OldDataOutputStreamTest method test_flush.

public void test_flush() throws IOException {
    // Wrap the byte-array sink in a buffer so that written data is held
    // back until an explicit flush().
    BufferedOutputStream buffered = new BufferedOutputStream(bos);
    os = new DataOutputStream(buffered);
    os.writeInt(9087589);
    assertTrue("Test 1: Written data should not be available.", bos.toByteArray().length == 0);
    os.flush();
    assertTrue("Test 2: Written data should be available.", bos.toByteArray().length > 0);
    os.close();

    // Round-trip: the flushed value must be readable back unchanged.
    openDataInputStream();
    int readBack = dis.readInt();
    assertEquals("Test 3: Failed to flush correctly;", 9087589, readBack);
    dis.close();

    // Flushing a stream whose sink always fails must propagate the error.
    os = new DataOutputStream(sos);
    try {
        os.flush();
        fail("Test 4: IOException expected.");
    } catch (IOException e) {
        // Expected.
    }
}
Also used : DataOutputStream(java.io.DataOutputStream) IOException(java.io.IOException) BufferedOutputStream(java.io.BufferedOutputStream)

Example 47 with BufferedOutputStream

use of java.io.BufferedOutputStream in project j2objc by google.

the class OldBufferedOutputStreamTest method test_write$BII_Exception.

public void test_write$BII_Exception() throws IOException {
    OutputStream out = new BufferedOutputStream(new ByteArrayOutputStream());
    byte[] absentBuffer = null;
    byte[] buffer = new byte[10];

    // A null buffer must be rejected.
    try {
        out.write(absentBuffer, 0, 1);
        fail("Test 1: NullPointerException expected.");
    } catch (NullPointerException e) {
    // Expected.
    }

    // A negative offset must be rejected.
    try {
        out.write(buffer, -1, 1);
        fail("Test 2: IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
    // Expected
    }

    // A negative length must be rejected.
    try {
        out.write(buffer, 0, -1);
        fail("Test 3: IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
    // Expected
    }

    // offset + length past the end of the buffer must be rejected.
    try {
        out.write(buffer, 1, 10);
        fail("Test 4: IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
    // Expected
    }
}
Also used : OutputStream(java.io.OutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Support_OutputStream(tests.support.Support_OutputStream) BufferedOutputStream(java.io.BufferedOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BufferedOutputStream(java.io.BufferedOutputStream)

Example 48 with BufferedOutputStream

use of java.io.BufferedOutputStream in project j2objc by google.

the class ZipOutputStreamTest method testCreateEmpty.

/**
     * Reference implementation does NOT allow writing of an empty zip using a
     * {@link ZipOutputStream}.
     */
public void testCreateEmpty() throws IOException {
    File result = File.createTempFile("ZipFileTest", "zip");
    // Keep a direct handle on the file stream: close() on the (empty)
    // ZipOutputStream is expected to throw, which would otherwise leave
    // the descriptor open.
    FileOutputStream fileOut = new FileOutputStream(result);
    ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(fileOut));
    try {
        out.close();
        fail("Close on empty stream failed to throw exception");
    } catch (ZipException e) {
    // expected
    } finally {
        // Fix: the original leaked both the file descriptor and the temp file.
        try {
            fileOut.close();
        } catch (IOException ignored) {
            // Best effort; the stream may already have been closed.
        }
        result.delete();
    }
}
Also used : ZipOutputStream(java.util.zip.ZipOutputStream) FileOutputStream(java.io.FileOutputStream) ZipException(java.util.zip.ZipException) File(java.io.File) BufferedOutputStream(java.io.BufferedOutputStream)

Example 49 with BufferedOutputStream

use of java.io.BufferedOutputStream in project j2objc by google.

the class ZipEntryTest method testMaxLengthExtra_zip64.

public void testMaxLengthExtra_zip64() throws Exception {
    // Just below the per-entry extra-field limit (65535), but large enough
    // that the zip64 extended info header no longer fits.
    byte[] oversizedExtra = new byte[65530];
    File zipFile = createTemporaryZipFile();
    ZipOutputStream out =
            new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)), true);
    ZipEntry entry = new ZipEntry("x");
    entry.setExtra(oversizedExtra);
    try {
        out.putNextEntry(entry);
        fail();
    } catch (ZipException expected) {
        // Expected: no room for the zip64 extended info record.
    }
}
Also used : ZipOutputStream(java.util.zip.ZipOutputStream) FileOutputStream(java.io.FileOutputStream) ZipEntry(java.util.zip.ZipEntry) ZipException(java.util.zip.ZipException) ZipFile(java.util.zip.ZipFile) File(java.io.File) BufferedOutputStream(java.io.BufferedOutputStream)

Example 50 with BufferedOutputStream

use of java.io.BufferedOutputStream in project Cloud9 by lintool.

the class HadoopAlign method doAlignment.

/**
 * Runs the full EM word-alignment pipeline on Hadoop: the configured number of
 * IBM Model 1 iterations followed by HMM iterations. Each iteration launches
 * an EM training job and then a single-task model-merge job; after the last
 * Model 1 iteration and after the final iteration an additional alignment job
 * writes posteriors, whose output feeds later iterations. Per-iteration
 * perplexity and AER are collected and printed at the end.
 *
 * @param mapTasks number of map tasks for the training/alignment jobs
 * @param reduceTasks number of reduce tasks for the training/alignment jobs
 * @param hac alignment configuration, also used as the Hadoop configuration
 * @throws IOException if preprocessing, filesystem access, or job execution fails
 */
@SuppressWarnings("deprecation")
public static void doAlignment(int mapTasks, int reduceTasks, HadoopAlignConfig hac) throws IOException {
    System.out.println("Running alignment: " + hac);
    FileSystem fs = FileSystem.get(hac);
    Path cbtxt = new Path(hac.getRoot() + "/comp-bitext");
    //		fs.delete(cbtxt, true);
    // Preprocess/numberize the bitext only if it has not been produced before.
    if (!fs.exists(cbtxt)) {
        CorpusVocabNormalizerAndNumberizer.preprocessAndNumberizeFiles(hac, hac.getBitexts(), cbtxt);
    }
    System.out.println("Finished preprocessing");
    int m1iters = hac.getModel1Iterations();
    int hmmiters = hac.getHMMIterations();
    int totalIterations = m1iters + hmmiters;
    String modelType = null;
    // Per-iteration metrics; indices stay aligned because perps gets a 0.0
    // placeholder for the alignment-only passes further down.
    ArrayList<Double> perps = new ArrayList<Double>();
    ArrayList<Double> aers = new ArrayList<Double>();
    boolean hmm = false;
    boolean firstHmm = true;
    Path model1PosteriorsPath = null;
    for (int iteration = 0; iteration < totalIterations; iteration++) {
        long start = System.currentTimeMillis();
        // NOTE(review): "ha.generate.posterios" looks misspelled — the
        // commented-out line near the end of this loop spells it
        // "ha.generate.posteriors". Confirm which key consumers actually read.
        hac.setBoolean("ha.generate.posterios", false);
        boolean lastIteration = (iteration == totalIterations - 1);
        boolean lastModel1Iteration = (iteration == m1iters - 1);
        // The first m1iters iterations run Model 1; the remainder run the HMM.
        if (iteration >= m1iters)
            hmm = true;
        if (hmm)
            modelType = "HMM";
        else
            modelType = "Model1";
        FileSystem fileSys = FileSystem.get(hac);
        String sOutputPath = modelType + ".data." + iteration;
        Path outputPath = new Path(sOutputPath);
        try {
            if (// no probs in first iteration!
            usePServer && iteration > 0)
                startPServers(hac);
            System.out.println("Starting iteration " + iteration + (iteration == 0 ? " (initialization)" : "") + ": " + modelType);
            JobConf conf = new JobConf(hac, HadoopAlign.class);
            conf.setJobName("EMTrain." + modelType + ".iter" + iteration);
            conf.setInputFormat(SequenceFileInputFormat.class);
            conf.set(KEY_TRAINER, MODEL1_TRAINER);
            conf.set(KEY_ITERATION, Integer.toString(iteration));
            conf.set("mapred.child.java.opts", "-Xmx2048m");
            // Iteration 0 initializes with a uniform Model 1 distribution.
            if (iteration == 0)
                conf.set(KEY_TRAINER, MODEL1_UNIFORM_INIT);
            if (hmm) {
                conf.set(KEY_TRAINER, HMM_TRAINER);
                // On the very first HMM iteration, seed a normalized default
                // a-table (transition table) on the filesystem.
                if (firstHmm) {
                    firstHmm = false;
                    System.out.println("Writing default a-table...");
                    Path pathATable = hac.getATablePath();
                    fileSys.delete(pathATable, true);
                    // NOTE(review): dos is not closed in a finally block, so a
                    // failure in at.write(dos) leaks the stream.
                    DataOutputStream dos = new DataOutputStream(new BufferedOutputStream(fileSys.create(pathATable)));
                    int cond_values = 1;
                    if (!hac.isHMMHomogeneous()) {
                        cond_values = 100;
                    }
                    ATable at = new ATable(hac.isHMMHomogeneous(), cond_values, 100);
                    at.normalize();
                    at.write(dos);
                    //			System.out.println(at);
                    dos.close();
                }
            }
            conf.setOutputKeyClass(IntWritable.class);
            conf.setOutputValueClass(PartialCountContainer.class);
            conf.setMapperClass(EMapper.class);
            conf.setReducerClass(EMReducer.class);
            conf.setNumMapTasks(mapTasks);
            conf.setNumReduceTasks(reduceTasks);
            System.out.println("Running job " + conf.getJobName());
            // otherwise, input is set to output of last model 1 iteration
            if (model1PosteriorsPath != null) {
                System.out.println("Input: " + model1PosteriorsPath);
                FileInputFormat.setInputPaths(conf, model1PosteriorsPath);
            } else {
                System.out.println("Input: " + cbtxt);
                FileInputFormat.setInputPaths(conf, cbtxt);
            }
            System.out.println("Output: " + outputPath);
            FileOutputFormat.setOutputPath(conf, new Path(hac.getRoot() + "/" + outputPath.toString()));
            fileSys.delete(new Path(hac.getRoot() + "/" + outputPath.toString()), true);
            conf.setOutputFormat(SequenceFileOutputFormat.class);
            RunningJob job = JobClient.runJob(conf);
            // Derive cross-entropy/perplexity from the job's log-prob and
            // word-count counters.
            Counters c = job.getCounters();
            double lp = c.getCounter(CrossEntropyCounters.LOGPROB);
            double wc = c.getCounter(CrossEntropyCounters.WORDCOUNT);
            double ce = lp / wc / Math.log(2);
            double perp = Math.pow(2.0, ce);
            double aer = ComputeAER(c);
            System.out.println("Iteration " + iteration + ": (" + modelType + ")\tCROSS-ENTROPY: " + ce + "   PERPLEXITY: " + perp);
            System.out.println("Iteration " + iteration + ": " + aer + " AER");
            aers.add(aer);
            perps.add(perp);
        } finally {
            stopPServers();
        }
        // Second job of the iteration: single-map, no-reduce model merge over
        // this iteration's training output.
        JobConf conf = new JobConf(hac, ModelMergeMapper2.class);
        System.err.println("Setting " + TTABLE_ITERATION_OUTPUT + " to " + outputPath.toString());
        conf.set(TTABLE_ITERATION_OUTPUT, hac.getRoot() + "/" + outputPath.toString());
        conf.setJobName("EMTrain.ModelMerge");
        //			conf.setOutputKeyClass(LongWritable.class);
        conf.setMapperClass(ModelMergeMapper2.class);
        conf.setSpeculativeExecution(false);
        conf.setNumMapTasks(1);
        conf.setNumReduceTasks(0);
        conf.setInputFormat(NullInputFormat.class);
        conf.setOutputFormat(NullOutputFormat.class);
        conf.set("mapred.map.child.java.opts", "-Xmx2048m");
        conf.set("mapred.reduce.child.java.opts", "-Xmx2048m");
        //			FileInputFormat.setInputPaths(conf, root+"/dummy");
        //			fileSys.delete(new Path(root+"/dummy.out"), true);
        //			FileOutputFormat.setOutputPath(conf, new Path(root+"/dummy.out"));
        //			conf.setOutputFormat(SequenceFileOutputFormat.class);
        System.out.println("Running job " + conf.getJobName());
        System.out.println("Input: " + hac.getRoot() + "/dummy");
        System.out.println("Output: " + hac.getRoot() + "/dummy.out");
        JobClient.runJob(conf);
        // Training output is no longer needed once the merge has consumed it.
        fileSys.delete(new Path(hac.getRoot() + "/" + outputPath.toString()), true);
        // Third (conditional) job: generate alignments/posteriors after the
        // final Model 1 iteration and after the last iteration overall.
        if (lastIteration || lastModel1Iteration) {
            //hac.setBoolean("ha.generate.posteriors", true);
            conf = new JobConf(hac, HadoopAlign.class);
            sOutputPath = modelType + ".data." + iteration;
            outputPath = new Path(sOutputPath);
            conf.setJobName(modelType + ".align");
            conf.set("mapred.map.child.java.opts", "-Xmx2048m");
            conf.set("mapred.reduce.child.java.opts", "-Xmx2048m");
            // TODO use file cache
            /*try {
					if (hmm || iteration > 0) {
						URI ttable = new URI(fileSys.getHomeDirectory() + Path.SEPARATOR + hac.getTTablePath().toString());
						DistributedCache.addCacheFile(ttable, conf);
						System.out.println("cache<-- " + ttable);
					}

				} catch (Exception e) { throw new RuntimeException("Caught " + e); }
         */
            conf.setInputFormat(SequenceFileInputFormat.class);
            conf.setOutputFormat(SequenceFileOutputFormat.class);
            conf.set(KEY_TRAINER, MODEL1_TRAINER);
            conf.set(KEY_ITERATION, Integer.toString(iteration));
            if (hmm)
                conf.set(KEY_TRAINER, HMM_TRAINER);
            conf.setOutputKeyClass(Text.class);
            conf.setOutputValueClass(PhrasePair.class);
            conf.setMapperClass(AlignMapper.class);
            conf.setReducerClass(IdentityReducer.class);
            conf.setNumMapTasks(mapTasks);
            conf.setNumReduceTasks(reduceTasks);
            FileOutputFormat.setOutputPath(conf, new Path(hac.getRoot() + "/" + outputPath.toString()));
            //if last model1 iteration, save output path, to be used as input path in later iterations
            if (lastModel1Iteration) {
                FileInputFormat.setInputPaths(conf, cbtxt);
                model1PosteriorsPath = new Path(hac.getRoot() + "/" + outputPath.toString());
            } else {
                FileInputFormat.setInputPaths(conf, model1PosteriorsPath);
            }
            // NOTE(review): this deletes the relative outputPath, while every
            // other delete targets hac.getRoot() + "/" + outputPath — confirm
            // this is intentional and not a path bug.
            fileSys.delete(outputPath, true);
            System.out.println("Running job " + conf.getJobName());
            RunningJob job = JobClient.runJob(conf);
            System.out.println("GENERATED: " + model1PosteriorsPath);
            Counters c = job.getCounters();
            double aer = ComputeAER(c);
            //				System.out.println("Iteration " + iteration + ": (" + modelType + ")\tCROSS-ENTROPY: " + ce + "   PERPLEXITY: " + perp);
            System.out.println("Iteration " + iteration + ": " + aer + " AER");
            aers.add(aer);
            // Placeholder keeps perps index-aligned with aers.
            perps.add(0.0);
        }
        long end = System.currentTimeMillis();
        System.out.println(modelType + " iteration " + iteration + " took " + ((end - start) / 1000) + " seconds.");
    }
    // Summary table: one row per recorded metric (AER, then perplexity).
    for (int i = 0; i < perps.size(); i++) {
        System.out.print("I=" + i + "\t");
        if (aers.size() > 0) {
            System.out.print(aers.get(i) + "\t");
        }
        System.out.println(perps.get(i));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DataOutputStream(java.io.DataOutputStream) ArrayList(java.util.ArrayList) ATable(edu.umd.hooka.alignment.hmm.ATable) FileSystem(org.apache.hadoop.fs.FileSystem) RunningJob(org.apache.hadoop.mapred.RunningJob) Counters(org.apache.hadoop.mapred.Counters) JobConf(org.apache.hadoop.mapred.JobConf) BufferedOutputStream(java.io.BufferedOutputStream)

Aggregations

BufferedOutputStream (java.io.BufferedOutputStream)1219 FileOutputStream (java.io.FileOutputStream)861 IOException (java.io.IOException)617 File (java.io.File)519 OutputStream (java.io.OutputStream)350 BufferedInputStream (java.io.BufferedInputStream)238 InputStream (java.io.InputStream)166 DataOutputStream (java.io.DataOutputStream)158 FileInputStream (java.io.FileInputStream)145 ZipOutputStream (java.util.zip.ZipOutputStream)121 FileNotFoundException (java.io.FileNotFoundException)113 ZipEntry (java.util.zip.ZipEntry)108 ByteArrayOutputStream (java.io.ByteArrayOutputStream)101 ZipFile (java.util.zip.ZipFile)62 URL (java.net.URL)57 XmlSerializer (org.xmlpull.v1.XmlSerializer)57 FastXmlSerializer (com.android.internal.util.FastXmlSerializer)56 ObjectOutputStream (java.io.ObjectOutputStream)54 GZIPOutputStream (java.util.zip.GZIPOutputStream)51 PrintStream (java.io.PrintStream)46