Search in sources:

Example 1 with FloatSupplierSerializer

Use of io.druid.segment.data.FloatSupplierSerializer in project druid by druid-io.

From the class FloatCompressionBenchmarkFileGenerator, method main:

public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }
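    // Define five value distributions to compress: an enumerated set with skewed
    // probabilities, low- and high-exponent Zipf, sequential, and continuous uniform.
    // (dirPath, ROW_NUM, and compressions are static fields of the enclosing class.)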
    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1, 0d, ImmutableList.<Object>of(0f, 1.1f, 2.2f, 3.3f, 4.4f), ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1, 1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1, 1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.FLOAT, true, 1, 0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform("", ValueType.FLOAT, true, 1, 0d, 0, 1000);
    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));
    File dir = new File(dirPath);
    dir.mkdir();
    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((Float) entry.getValue().generateRowValue() + "\n");
            }
        }
    }
    // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            String name = entry.getKey() + "-" + compression.toString();
            System.out.print(name + ": ");
            File compFile = new File(dir, name);
            compFile.delete();
            File dataFile = new File(dir, entry.getKey());
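            // The IOPeon supplies scratch files for the serializer's intermediate output;
            // CompressionFactory picks a FloatSupplierSerializer for the requested strategy.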
            TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
            FloatSupplierSerializer writer = CompressionFactory.getFloatSerializer(iopeon, "float", ByteOrder.nativeOrder(), compression);
            try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));
                FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) {
                writer.open();
                String line;
                while ((line = br.readLine()) != null) {
                    writer.add(Float.parseFloat(line));
                }
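                // closeAndConsolidate merges the serializer's intermediate files and streams
                // the finished compressed column to the supplied ByteSink.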
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                writer.closeAndConsolidate(new ByteSink() {

                    @Override
                    public OutputStream openStream() throws IOException {
                        return baos;
                    }
                });
                output.write(ByteBuffer.wrap(baos.toByteArray()));
            } finally {
                // br and output are closed by try-with-resources; only the IOPeon remains.
                iopeon.close();
            }
            System.out.print(compFile.length() / 1024 + "\n");
        }
    }
}
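Stripped to its essentials, the serializer flow above is open, add, closeAndConsolidate. Below is a minimal, self-contained sketch against the same io.druid.segment.data API as the benchmark; the LZ4 strategy constant and the sample values are illustrative assumptions, not part of the original code.

import com.google.common.io.ByteSink;
import io.druid.segment.data.CompressedObjectStrategy;
import io.druid.segment.data.CompressionFactory;
import io.druid.segment.data.FloatSupplierSerializer;
import io.druid.segment.data.TmpFileIOPeon;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteOrder;

public class FloatSerializerSketch {
    public static void main(String[] args) throws IOException {
        TmpFileIOPeon ioPeon = new TmpFileIOPeon(true);
        try {
            // Assumed: CompressionStrategy.LZ4 is one of the strategies the benchmark iterates over.
            FloatSupplierSerializer serializer = CompressionFactory.getFloatSerializer(ioPeon, "float", ByteOrder.nativeOrder(), CompressedObjectStrategy.CompressionStrategy.LZ4);
            serializer.open();
            // Feed some sample values; the benchmark above reads them from generated files.
            for (int i = 0; i < 1000; i++) {
                serializer.add(i * 0.5f);
            }
            // Collect the consolidated, compressed column in memory.
            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            serializer.closeAndConsolidate(new ByteSink() {

                @Override
                public OutputStream openStream() throws IOException {
                    return baos;
                }
            });
            System.out.println("compressed size: " + baos.size() + " bytes");
        } finally {
            ioPeon.close();
        }
    }
}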
Also used: HashMap(java.util.HashMap) ByteArrayOutputStream(java.io.ByteArrayOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) CompressedObjectStrategy(io.druid.segment.data.CompressedObjectStrategy) BufferedWriter(java.io.BufferedWriter) ByteSink(com.google.common.io.ByteSink) TmpFileIOPeon(io.druid.segment.data.TmpFileIOPeon) FloatSupplierSerializer(io.druid.segment.data.FloatSupplierSerializer) InputStreamReader(java.io.InputStreamReader) FileChannel(java.nio.channels.FileChannel) BenchmarkColumnValueGenerator(io.druid.benchmark.datagen.BenchmarkColumnValueGenerator) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) BenchmarkColumnSchema(io.druid.benchmark.datagen.BenchmarkColumnSchema) File(java.io.File) Map(java.util.Map) Writer(java.io.Writer)

Aggregations

ByteSink (com.google.common.io.ByteSink): 1
BenchmarkColumnSchema (io.druid.benchmark.datagen.BenchmarkColumnSchema): 1
BenchmarkColumnValueGenerator (io.druid.benchmark.datagen.BenchmarkColumnValueGenerator): 1
CompressedObjectStrategy (io.druid.segment.data.CompressedObjectStrategy): 1
FloatSupplierSerializer (io.druid.segment.data.FloatSupplierSerializer): 1
TmpFileIOPeon (io.druid.segment.data.TmpFileIOPeon): 1
BufferedReader (java.io.BufferedReader): 1
BufferedWriter (java.io.BufferedWriter): 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1
File (java.io.File): 1
FileInputStream (java.io.FileInputStream): 1
FileOutputStream (java.io.FileOutputStream): 1
IOException (java.io.IOException): 1
InputStreamReader (java.io.InputStreamReader): 1
OutputStream (java.io.OutputStream): 1
OutputStreamWriter (java.io.OutputStreamWriter): 1
Writer (java.io.Writer): 1
FileChannel (java.nio.channels.FileChannel): 1
HashMap (java.util.HashMap): 1
Map (java.util.Map): 1