Search in sources :

Example 1 with Gson

Use of com.facebook.presto.hadoop.$internal.com.google.gson.Gson in project carbondata by apache.

The getInputSplits2 method of the CarbonTableReader class.

public List<CarbonLocalInputSplit> getInputSplits2(CarbonTableCacheModel tableCacheModel, Expression filters) {
    List<CarbonLocalInputSplit> result = new ArrayList<>();
    // Propagate the connector's unsafe working memory setting, if configured.
    if (config.getUnsafeMemoryInMb() != null) {
        CarbonProperties.getInstance().addProperty(CarbonCommonConstants.UNSAFE_WORKING_MEMORY_IN_MB,
            config.getUnsafeMemoryInMb());
    }
    CarbonTable carbonTable = tableCacheModel.carbonTable;
    TableInfo tableInfo = tableCacheModel.carbonTable.getTableInfo();
    // Build a Hadoop Configuration describing the table to read; this local
    // variable shadows the connector config field used above.
    Configuration config = new Configuration();
    config.set(CarbonTableInputFormat.INPUT_SEGMENT_NUMBERS, "");
    String carbonTablePath = carbonTable.getAbsoluteTableIdentifier().getTablePath();
    config.set(CarbonTableInputFormat.INPUT_DIR, carbonTablePath);
    config.set(CarbonTableInputFormat.DATABASE_NAME, carbonTable.getDatabaseName());
    config.set(CarbonTableInputFormat.TABLE_NAME, carbonTable.getTableName());
    try {
        CarbonTableInputFormat.setTableInfo(config, tableInfo);
        CarbonTableInputFormat carbonTableInputFormat =
            createInputFormat(config, carbonTable.getAbsoluteTableIdentifier(), filters);
        JobConf jobConf = new JobConf(config);
        Job job = Job.getInstance(jobConf);
        // Ask the input format for the splits, then convert each CarbonInputSplit
        // into a CarbonLocalInputSplit, serializing its detail info to JSON with Gson.
        List<InputSplit> splits = carbonTableInputFormat.getSplits(job);
        Gson gson = new Gson();
        if (splits != null && splits.size() > 0) {
            for (InputSplit inputSplit : splits) {
                CarbonInputSplit carbonInputSplit = (CarbonInputSplit) inputSplit;
                result.add(new CarbonLocalInputSplit(carbonInputSplit.getSegmentId(),
                    carbonInputSplit.getPath().toString(), carbonInputSplit.getStart(),
                    carbonInputSplit.getLength(), Arrays.asList(carbonInputSplit.getLocations()),
                    carbonInputSplit.getNumberOfBlocklets(), carbonInputSplit.getVersion().number(),
                    carbonInputSplit.getDeleteDeltaFiles(),
                    gson.toJson(carbonInputSplit.getDetailInfo())));
            }
        }
    } catch (IOException e) {
        throw new RuntimeException("Error creating Splits from CarbonTableInputFormat", e);
    }
    return result;
}
Also used : Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Gson (com.facebook.presto.hadoop.$internal.com.google.gson.Gson), CarbonInputSplit (org.apache.carbondata.hadoop.CarbonInputSplit), IOException (java.io.IOException), CarbonTable (org.apache.carbondata.core.metadata.schema.table.CarbonTable), CarbonTableInputFormat (org.apache.carbondata.hadoop.api.CarbonTableInputFormat), TableInfo (org.apache.carbondata.core.metadata.schema.table.TableInfo), Job (org.apache.hadoop.mapreduce.Job), JobConf (org.apache.hadoop.mapred.JobConf), InputSplit (org.apache.hadoop.mapreduce.InputSplit)
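
In this method, Gson is used only to flatten the split's detail info (the value returned by carbonInputSplit.getDetailInfo()) into a JSON string before it is packed into a CarbonLocalInputSplit. The sketch below shows that serialize/deserialize round trip in isolation. It is a minimal, hedged example: it uses the plain com.google.gson.Gson artifact rather than the Presto-shaded copy, and the DetailInfo class with its fields is an illustrative stand-in, not the real Carbon detail-info API.

import com.google.gson.Gson;

public class GsonSplitInfoSketch {

    // Hypothetical stand-in for the split detail info; field names are illustrative only.
    static class DetailInfo {
        int rowCount;
        short pagesCount;
        String blockletId;

        DetailInfo(int rowCount, short pagesCount, String blockletId) {
            this.rowCount = rowCount;
            this.pagesCount = pagesCount;
            this.blockletId = blockletId;
        }
    }

    public static void main(String[] args) {
        Gson gson = new Gson();

        // Serialize the detail info to a JSON string, mirroring
        // gson.toJson(carbonInputSplit.getDetailInfo()) in getInputSplits2.
        DetailInfo info = new DetailInfo(100000, (short) 32, "0");
        String json = gson.toJson(info);
        System.out.println(json); // {"rowCount":100000,"pagesCount":32,"blockletId":"0"}

        // The consumer of the local split can later rebuild the object from the JSON payload.
        DetailInfo roundTripped = gson.fromJson(json, DetailInfo.class);
        System.out.println(roundTripped.rowCount);
    }
}

Carrying the detail info as a JSON string keeps CarbonLocalInputSplit easy to ship between the Presto coordinator and workers without requiring the detail-info class itself to be serializable.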
