Use of org.apache.carbondata.processing.newflow.converter.FieldConverter in project carbondata by apache.
The class RowConverterImpl, method createCopyForNewThread:
@Override
public RowConverter createCopyForNewThread() {
  // Create a converter for another thread, sharing this instance's field
  // schema, load configuration and bad-record logger.
  RowConverterImpl converter =
      new RowConverterImpl(this.fields, this.configuration, this.badRecordLogger);
  List<FieldConverter> fieldConverterList = new ArrayList<>();
  // The copy gets its own dictionary client, tracked in dictClients.
  DictionaryClient client = createDictionaryClient();
  dictClients.add(client);
  String nullFormat = configuration
      .getDataLoadProperty(DataLoadProcessorConstants.SERIALIZATION_NULL_FORMAT).toString();
  boolean isEmptyBadRecord = Boolean.parseBoolean(configuration
      .getDataLoadProperty(DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD).toString());
  // Build one FieldConverter per column, reusing the per-column local caches
  // created by initialize().
  for (int i = 0; i < fields.length; i++) {
    FieldConverter fieldConverter = null;
    try {
      fieldConverter = FieldEncoderFactory.getInstance().createFieldEncoder(fields[i], cache,
          configuration.getTableIdentifier().getCarbonTableIdentifier(), i, nullFormat, client,
          configuration.getUseOnePass(), configuration.getTableIdentifier().getStorePath(), false,
          localCaches[i], isEmptyBadRecord);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    fieldConverterList.add(fieldConverter);
  }
  converter.fieldConverters =
      fieldConverterList.toArray(new FieldConverter[fieldConverterList.size()]);
  converter.logHolder = new BadRecordLogHolder();
  return converter;
}
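A minimal usage sketch for the method above, assuming the one-copy-per-worker-thread pattern it is built for: the root converter is initialized once, and each worker thread converts its own partition of rows through a private copy. The executor setup, thread count, and the convertPartition placeholder are illustrative assumptions, not code from the project, and the import path for RowConverter is assumed to match the FieldConverter package shown above.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.carbondata.processing.newflow.converter.RowConverter;

public class ParallelConversionSketch {

  // Hypothetical driver: initialize once, then hand each worker its own copy.
  public void convertInParallel(RowConverter rootConverter, int numThreads) throws Exception {
    // Builds the reverse-dictionary cache and per-column local caches.
    rootConverter.initialize();
    ExecutorService pool = Executors.newFixedThreadPool(numThreads);
    for (int t = 0; t < numThreads; t++) {
      // Each copy has its own FieldConverters and DictionaryClient, but shares
      // the per-column local caches with the root converter.
      RowConverter threadConverter = rootConverter.createCopyForNewThread();
      pool.submit(() -> convertPartition(threadConverter));
    }
    pool.shutdown();
  }

  // Hypothetical placeholder: iterate this thread's rows and run each one
  // through the thread-local converter.
  private void convertPartition(RowConverter converter) {
  }
}

Because every thread works against its own copy, the mutable BadRecordLogHolder and the FieldConverter array are never shared across threads; only the ConcurrentHashMap-backed local caches are.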
Use of org.apache.carbondata.processing.newflow.converter.FieldConverter in project carbondata by apache.
The class RowConverterImpl, method initialize:
@Override
public void initialize() throws IOException {
  CacheProvider cacheProvider = CacheProvider.getInstance();
  // Reverse-dictionary cache used for dictionary lookups during conversion.
  cache = cacheProvider.createCache(CacheType.REVERSE_DICTIONARY,
      configuration.getTableIdentifier().getStorePath());
  String nullFormat = configuration
      .getDataLoadProperty(DataLoadProcessorConstants.SERIALIZATION_NULL_FORMAT).toString();
  boolean isEmptyBadRecord = Boolean.parseBoolean(configuration
      .getDataLoadProperty(DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD).toString());
  List<FieldConverter> fieldConverterList = new ArrayList<>();
  // One local cache per column; these maps are also handed to copies created
  // by createCopyForNewThread().
  localCaches = new Map[fields.length];
  long lruCacheStartTime = System.currentTimeMillis();
  DictionaryClient client = createDictionaryClient();
  dictClients.add(client);
  for (int i = 0; i < fields.length; i++) {
    localCaches[i] = new ConcurrentHashMap<>();
    FieldConverter fieldConverter = FieldEncoderFactory.getInstance().createFieldEncoder(fields[i],
        cache, configuration.getTableIdentifier().getCarbonTableIdentifier(), i, nullFormat, client,
        configuration.getUseOnePass(), configuration.getTableIdentifier().getStorePath(), true,
        localCaches[i], isEmptyBadRecord);
    fieldConverterList.add(fieldConverter);
  }
  // Record how long building the converters (and loading the LRU cache) took, in seconds.
  CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
      .recordLruCacheLoadTime((System.currentTimeMillis() - lruCacheStartTime) / 1000.0);
  fieldConverters = fieldConverterList.toArray(new FieldConverter[fieldConverterList.size()]);
  logHolder = new BadRecordLogHolder();
}
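The FieldConverter array assembled in initialize() is what later converts each incoming row, column by column. The sketch below illustrates that loop; the convert(CarbonRow, BadRecordLogHolder) signature and the import paths for CarbonRow and BadRecordLogHolder are assumptions for illustration and may differ from the actual interface.

// Illustrative sketch; the FieldConverter.convert signature and the CarbonRow
// and BadRecordLogHolder import paths are assumptions, not quoted source.
import org.apache.carbondata.processing.newflow.converter.BadRecordLogHolder;
import org.apache.carbondata.processing.newflow.converter.FieldConverter;
import org.apache.carbondata.processing.newflow.row.CarbonRow;

public class FieldConversionSketch {

  // Runs one row through every per-column converter in order; each converter
  // is expected to rewrite its own column and report bad records via logHolder.
  public static void convertRow(FieldConverter[] fieldConverters, CarbonRow row,
      BadRecordLogHolder logHolder) throws Exception {
    for (int i = 0; i < fieldConverters.length; i++) {
      fieldConverters[i].convert(row, logHolder);
    }
  }
}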