Search in sources :

Example 1 with CarbonReader

use of org.apache.carbondata.sdk.file.CarbonReader in project carbondata by apache.

The class SDKS3Example, method main.

public static void main(String[] args) throws Exception {
    Logger logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
    if (args == null || args.length < 3) {
        // Bug fix: the two concatenated literals were missing a separating space and
        // printed "...<secret-key><s3-endpoint>..." in the usage message.
        logger.error("Usage: java CarbonS3Example: <access-key> <secret-key> "
            + "<s3-endpoint> [table-path-on-s3] [rows] [Number of writes]");
        // Bug fix: a usage error is a failure; exit non-zero so scripts can detect it.
        System.exit(1);
    }
    // Remember the current property value so it can be restored at the end of the run.
    String backupProperty = CarbonProperties.getInstance().getProperty(
        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH,
        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH_DEFAULT);
    CarbonProperties.getInstance().addProperty(
        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH, "true");
    // Optional positional arguments, each with a default.
    String path = "s3a://sdk/WriterOutput";
    if (args.length > 3) {
        path = args[3];
    }
    int rows = 3;
    if (args.length > 4) {
        rows = Integer.parseInt(args[4]);
    }
    int num = 3;
    if (args.length > 5) {
        num = Integer.parseInt(args[5]);
    }
    // S3 credentials/endpoint are carried to the writer and both readers via this conf.
    Configuration conf = new Configuration(true);
    conf.set(Constants.ACCESS_KEY, args[0]);
    conf.set(Constants.SECRET_KEY, args[1]);
    conf.set(Constants.ENDPOINT, args[2]);
    Field[] fields = new Field[2];
    fields[0] = new Field("name", DataTypes.STRING);
    fields[1] = new Field("age", DataTypes.INT);
    // Perform `num` separate loads of `rows` rows each.
    for (int j = 0; j < num; j++) {
        CarbonWriter writer = CarbonWriter.builder()
            .outputPath(path)
            .withHadoopConf(conf)
            .withCsvInput(new Schema(fields))
            .writtenBy("SDKS3Example")
            .build();
        for (int i = 0; i < rows; i++) {
            writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
        }
        writer.close();
    }
    // Read data, filtering on name == "robot1".
    EqualToExpression equalToExpression = new EqualToExpression(
        new ColumnExpression("name", DataTypes.STRING),
        new LiteralExpression("robot1", DataTypes.STRING));
    CarbonReader reader = CarbonReader.builder(path, "_temp")
        .projection(new String[] { "name", "age" })
        .filter(equalToExpression)
        .withHadoopConf(conf)
        .build();
    System.out.println("\nData:");
    int i = 0;
    // Cap the printed output at 20 rows.
    while (i < 20 && reader.hasNext()) {
        Object[] row = (Object[]) reader.readNextRow();
        System.out.println(row[0] + " " + row[1]);
        i++;
    }
    System.out.println("\nFinished");
    reader.close();
    // Read without filter. Consistency fix: reuse the already-populated Hadoop conf
    // (same three keys) instead of re-passing ACCESS_KEY/SECRET_KEY/ENDPOINT one by
    // one via unqualified constants, matching how the first reader is built.
    CarbonReader reader2 = CarbonReader.builder(path, "_temp")
        .projection(new String[] { "name", "age" })
        .withHadoopConf(conf)
        .build();
    System.out.println("\nData:");
    i = 0;
    while (i < 20 && reader2.hasNext()) {
        Object[] row = (Object[]) reader2.readNextRow();
        System.out.println(row[0] + " " + row[1]);
        i++;
    }
    System.out.println("\nFinished");
    reader2.close();
    // Restore the property overridden at the top of this method.
    CarbonProperties.getInstance().addProperty(
        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH, backupProperty);
}
Also used : EqualToExpression(org.apache.carbondata.core.scan.expression.conditional.EqualToExpression) Configuration(org.apache.hadoop.conf.Configuration) Schema(org.apache.carbondata.sdk.file.Schema) LiteralExpression(org.apache.carbondata.core.scan.expression.LiteralExpression) CarbonWriter(org.apache.carbondata.sdk.file.CarbonWriter) Logger(org.apache.log4j.Logger) Field(org.apache.carbondata.core.metadata.datatype.Field) ColumnExpression(org.apache.carbondata.core.scan.expression.ColumnExpression) CarbonReader(org.apache.carbondata.sdk.file.CarbonReader)

Example 2 with CarbonReader

use of org.apache.carbondata.sdk.file.CarbonReader in project carbondata by apache.

The class CarbonReaderExample, method main.

public static void main(String[] args) {
    // Local scratch directory; created by the writer and removed in `finally`.
    String path = "./testWriteFiles";
    try {
        FileUtils.deleteDirectory(new File(path));
        CarbonProperties.getInstance()
            .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
                CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
            .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
                CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT);
        // Schema covering every primitive type plus decimal, varchar and an array column.
        Field[] fields = new Field[11];
        fields[0] = new Field("stringField", DataTypes.STRING);
        fields[1] = new Field("shortField", DataTypes.SHORT);
        fields[2] = new Field("intField", DataTypes.INT);
        fields[3] = new Field("longField", DataTypes.LONG);
        fields[4] = new Field("doubleField", DataTypes.DOUBLE);
        fields[5] = new Field("boolField", DataTypes.BOOLEAN);
        fields[6] = new Field("dateField", DataTypes.DATE);
        fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
        fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
        fields[9] = new Field("varcharField", DataTypes.VARCHAR);
        fields[10] = new Field("arrayField", DataTypes.createArrayType(DataTypes.STRING));
        // '#' separates the elements of the array column in the CSV input.
        CarbonWriter writer = CarbonWriter.builder()
            .outputPath(path)
            .withLoadOption("complex_delimiter_level_1", "#")
            .withCsvInput(new Schema(fields))
            .writtenBy("CarbonReaderExample")
            .build();
        for (int i = 0; i < 10; i++) {
            String[] row2 = new String[] { "robot" + (i % 10), String.valueOf(i % 10000), String.valueOf(i), String.valueOf(Long.MAX_VALUE - i), String.valueOf((double) i / 2), String.valueOf(true), "2019-03-02", "2019-02-12 03:03:34", "12.345", "varchar", "Hello#World#From#Carbon" };
            writer.write(row2);
        }
        writer.close();
        // Idiom: lambda instead of an anonymous FilenameFilter; select index files only.
        File[] dataFiles = new File(path)
            .listFiles((dir, name) -> name != null && name.endsWith("carbonindex"));
        if (dataFiles == null || dataFiles.length < 1) {
            throw new RuntimeException("Carbon index file not exists.");
        }
        Schema schema = CarbonSchemaReader.readSchema(dataFiles[0].getAbsolutePath()).asOriginOrder();
        // Transform the schema into a projection list of all column names.
        String[] strings = new String[schema.getFields().length];
        for (int i = 0; i < schema.getFields().length; i++) {
            strings[i] = (schema.getFields())[i].getFieldName();
        }
        // Read data with an explicit projection.
        CarbonReader reader = CarbonReader.builder(path, "_temp").projection(strings).build();
        System.out.println("\nData:");
        int i = 0;
        while (reader.hasNext()) {
            Object[] row = (Object[]) reader.readNextRow();
            System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t", i, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9]));
            // Last projected column is the string array; print and spot-check it.
            Object[] arr = (Object[]) row[10];
            for (int j = 0; j < arr.length; j++) {
                System.out.print(arr[j] + " ");
            }
            assert (arr[0].equals("Hello"));
            assert (arr[3].equals("Carbon"));
            System.out.println();
            i++;
        }
        reader.close();
        // Read data again without a projection; note the column order differs here
        // (the array appears at index 4), as shown by the format strings below.
        CarbonReader reader2 = CarbonReader.builder(path, "_temp").build();
        System.out.println("\nData:");
        i = 0;
        while (reader2.hasNext()) {
            Object[] row = (Object[]) reader2.readNextRow();
            System.out.print(String.format("%s\t%s\t%s\t%s\t%s\t", i, row[0], row[1], row[2], row[3]));
            Object[] arr = (Object[]) row[4];
            for (int j = 0; j < arr.length; j++) {
                System.out.print(arr[j] + " ");
            }
            System.out.println(String.format("\t%s\t%s\t%s\t%s\t%s\t%s\t", row[5], row[6], row[7], row[8], row[9], row[10]));
            i++;
        }
        reader2.close();
    } catch (Throwable e) {
        // Bug fix: previously the failure was only signalled via `assert (false)`,
        // which is a no-op unless the JVM runs with -ea, so errors were silently
        // swallowed (and the println after it was unreachable with -ea). Print the
        // trace for context, then rethrow so the example visibly fails.
        e.printStackTrace();
        throw new RuntimeException(e);
    } finally {
        // Best-effort cleanup of the scratch directory.
        try {
            FileUtils.deleteDirectory(new File(path));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Also used : Schema(org.apache.carbondata.sdk.file.Schema) CarbonWriter(org.apache.carbondata.sdk.file.CarbonWriter) IOException(java.io.IOException) Field(org.apache.carbondata.core.metadata.datatype.Field) FilenameFilter(java.io.FilenameFilter) CarbonReader(org.apache.carbondata.sdk.file.CarbonReader) File(java.io.File)

Example 3 with CarbonReader

use of org.apache.carbondata.sdk.file.CarbonReader in project carbondata by apache.

The class SDKS3ReadExample, method main.

public static void main(String[] args) throws Exception {
    Logger logger = LogServiceFactory.getLogService(SDKS3ReadExample.class.getName());
    if (args == null || args.length < 3) {
        logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>" + "<s3-endpoint> [table-path-on-s3]");
        // Bug fix: a usage error is a failure; exit non-zero so scripts can detect it.
        System.exit(1);
    }
    // Default table path on S3, overridable by the 4th argument.
    String path = "s3a://sdk/WriterOutput/carbondata5";
    if (args.length > 3) {
        path = args[3];
    }
    // 1. read with file list: enumerate the carbondata files under the path.
    Configuration conf = new Configuration();
    conf.set(ACCESS_KEY, args[0]);
    conf.set(SECRET_KEY, args[1]);
    conf.set(ENDPOINT, args[2]);
    // NOTE(review): raw List — the element type of listFiles() is not visible from
    // here; parameterize (e.g. List<String>) once confirmed against the helper.
    List fileLists = listFiles(path, CarbonTablePath.CARBON_DATA_EXT, conf);
    // Read data from the explicit file list, filtering on name == "robot1".
    EqualToExpression equalToExpression = new EqualToExpression(
        new ColumnExpression("name", DataTypes.STRING),
        new LiteralExpression("robot1", DataTypes.STRING));
    CarbonReader reader = CarbonReader.builder()
        .projection(new String[] { "name", "age" })
        .filter(equalToExpression)
        .withHadoopConf(ACCESS_KEY, args[0])
        .withHadoopConf(SECRET_KEY, args[1])
        .withHadoopConf(ENDPOINT, args[2])
        .withFileLists(fileLists)
        .build();
    System.out.println("\nData:");
    int i = 0;
    // Cap the printed output at 20 rows.
    while (i < 20 && reader.hasNext()) {
        Object[] row = (Object[]) reader.readNextRow();
        System.out.println(row[0] + " " + row[1]);
        i++;
    }
    System.out.println("\nFinished");
    reader.close();
    // 2. Read by table path, without filter.
    CarbonReader reader2 = CarbonReader.builder(path, "_temp")
        .projection(new String[] { "name", "age" })
        .withHadoopConf(ACCESS_KEY, args[0])
        .withHadoopConf(SECRET_KEY, args[1])
        .withHadoopConf(ENDPOINT, args[2])
        .build();
    System.out.println("\nData:");
    i = 0;
    while (i < 20 && reader2.hasNext()) {
        Object[] row = (Object[]) reader2.readNextRow();
        System.out.println(row[0] + " " + row[1]);
        i++;
    }
    System.out.println("\nFinished");
    reader2.close();
}
Also used : EqualToExpression(org.apache.carbondata.core.scan.expression.conditional.EqualToExpression) Configuration(org.apache.hadoop.conf.Configuration) ColumnExpression(org.apache.carbondata.core.scan.expression.ColumnExpression) LiteralExpression(org.apache.carbondata.core.scan.expression.LiteralExpression) CarbonReader(org.apache.carbondata.sdk.file.CarbonReader) List(java.util.List) Logger(org.apache.log4j.Logger)

Aggregations

CarbonReader (org.apache.carbondata.sdk.file.CarbonReader)3 Field (org.apache.carbondata.core.metadata.datatype.Field)2 ColumnExpression (org.apache.carbondata.core.scan.expression.ColumnExpression)2 LiteralExpression (org.apache.carbondata.core.scan.expression.LiteralExpression)2 EqualToExpression (org.apache.carbondata.core.scan.expression.conditional.EqualToExpression)2 CarbonWriter (org.apache.carbondata.sdk.file.CarbonWriter)2 Schema (org.apache.carbondata.sdk.file.Schema)2 Configuration (org.apache.hadoop.conf.Configuration)2 Logger (org.apache.log4j.Logger)2 File (java.io.File)1 FilenameFilter (java.io.FilenameFilter)1 IOException (java.io.IOException)1 List (java.util.List)1