Use of org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException in project carbondata by apache.
The class CarbonRowDataWriterProcessorStepImpl, method execute.
@Override
public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
  final Iterator<CarbonRowBatch>[] iterators = child.execute();
  tableIdentifier = configuration.getTableIdentifier().getCarbonTableIdentifier();
  tableName = tableIdentifier.getTableName();
  try {
    readCounter = new long[iterators.length];
    writeCounter = new long[iterators.length];
    dimensionWithComplexCount = configuration.getDimensionCount();
    noDictWithComplextCount =
        configuration.getNoDictionaryCount() + configuration.getComplexColumnCount();
    dimensionCount = configuration.getDimensionCount() - noDictWithComplextCount;
    isNoDictionaryDimensionColumn =
        CarbonDataProcessorUtil.getNoDictionaryMapping(configuration.getDataFields());
    measureDataType = configuration.getMeasureDataType();
    CarbonFactDataHandlerModel dataHandlerModel = CarbonFactDataHandlerModel
        .createCarbonFactDataHandlerModel(configuration,
            getStoreLocation(tableIdentifier, String.valueOf(0)), 0, 0);
    measureCount = dataHandlerModel.getMeasureCount();
    outputLength = measureCount + (this.noDictWithComplextCount > 0 ? 1 : 0) + 1;
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordDictionaryValue2MdkAdd2FileTime(configuration.getPartitionId(),
            System.currentTimeMillis());
    if (iterators.length == 1) {
      doExecute(iterators[0], 0, 0);
    } else {
      ExecutorService executorService = Executors.newFixedThreadPool(iterators.length);
      Future[] futures = new Future[iterators.length];
      for (int i = 0; i < iterators.length; i++) {
        futures[i] = executorService.submit(new DataWriterRunnable(iterators[i], i));
      }
      for (Future future : futures) {
        future.get();
      }
    }
  } catch (CarbonDataWriterException e) {
    LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
    throw new CarbonDataLoadingException(
        "Error while initializing data handler : " + e.getMessage());
  } catch (Exception e) {
    LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
    throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage(), e);
  }
  return null;
}
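The multi-iterator branch submits DataWriterRunnable tasks, but that inner class is not shown in this excerpt. A minimal sketch of what it could look like, assuming it only forwards its iterator and index to doExecute (mirroring the single-iterator call doExecute(iterators[0], 0, 0)); the project's actual inner class may differ:

// Hypothetical sketch, not the project's actual inner class.
class DataWriterRunnable implements Runnable {

  private final Iterator<CarbonRowBatch> iterator;
  private final int iteratorIndex;

  DataWriterRunnable(Iterator<CarbonRowBatch> iterator, int iteratorIndex) {
    this.iterator = iterator;
    this.iteratorIndex = iteratorIndex;
  }

  @Override
  public void run() {
    try {
      doExecute(iterator, 0, iteratorIndex);
    } catch (Exception e) {
      // CarbonDataLoadingException is unchecked, so the failure surfaces
      // through future.get() as an ExecutionException in execute()
      throw new CarbonDataLoadingException(
          "Data writing failed for iterator " + iteratorIndex, e);
    }
  }
}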
Use of org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException in project carbondata by apache.
The class DataWriterBatchProcessorStepImpl, method execute.
@Override
public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
  Iterator<CarbonRowBatch>[] iterators = child.execute();
  CarbonTableIdentifier tableIdentifier =
      configuration.getTableIdentifier().getCarbonTableIdentifier();
  String tableName = tableIdentifier.getTableName();
  try {
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordDictionaryValue2MdkAdd2FileTime(configuration.getPartitionId(),
            System.currentTimeMillis());
    int i = 0;
    for (Iterator<CarbonRowBatch> iterator : iterators) {
      String storeLocation = getStoreLocation(tableIdentifier, String.valueOf(i));
      int k = 0;
      while (iterator.hasNext()) {
        CarbonRowBatch next = iterator.next();
        CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
            .createCarbonFactDataHandlerModel(configuration, storeLocation, i, k++);
        CarbonFactHandler dataHandler = CarbonFactHandlerFactory
            .createCarbonFactHandler(model, CarbonFactHandlerFactory.FactHandlerType.COLUMNAR);
        dataHandler.initialise();
        processBatch(next, dataHandler);
        finish(tableName, dataHandler);
      }
      i++;
    }
  } catch (Exception e) {
    LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterBatchProcessorStepImpl");
    throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage());
  }
  return null;
}
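Here a fresh fact handler is created per batch, and processBatch/finish are defined elsewhere in the step. A hedged sketch of the processBatch side, assuming CarbonFactHandler exposes a per-row addDataToStore(CarbonRow) entry point (an assumption about the handler API; the project's helper may differ in detail, e.g. by tracking row counters):

// Hedged sketch: feed one CarbonRowBatch into the columnar fact handler.
// Assumes addDataToStore(CarbonRow) is the per-row entry point.
private void processBatch(CarbonRowBatch batch, CarbonFactHandler dataHandler) throws Exception {
  while (batch.hasNext()) {
    dataHandler.addDataToStore(batch.next());
  }
}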
Use of org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException in project carbondata by apache.
The class DataWriterProcessorStepImpl, method execute.
@Override
public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
  Iterator<CarbonRowBatch>[] iterators = child.execute();
  CarbonTableIdentifier tableIdentifier =
      configuration.getTableIdentifier().getCarbonTableIdentifier();
  String tableName = tableIdentifier.getTableName();
  try {
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordDictionaryValue2MdkAdd2FileTime(configuration.getPartitionId(),
            System.currentTimeMillis());
    int i = 0;
    for (Iterator<CarbonRowBatch> iterator : iterators) {
      String storeLocation = getStoreLocation(tableIdentifier, String.valueOf(i));
      CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
          .createCarbonFactDataHandlerModel(configuration, storeLocation, i, 0);
      CarbonFactHandler dataHandler = null;
      boolean rowsNotExist = true;
      while (iterator.hasNext()) {
        if (rowsNotExist) {
          rowsNotExist = false;
          dataHandler = CarbonFactHandlerFactory
              .createCarbonFactHandler(model, CarbonFactHandlerFactory.FactHandlerType.COLUMNAR);
          dataHandler.initialise();
        }
        processBatch(iterator.next(), dataHandler);
      }
      if (!rowsNotExist) {
        finish(tableName, dataHandler);
      }
      i++;
    }
  } catch (CarbonDataWriterException e) {
    LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
    throw new CarbonDataLoadingException(
        "Error while initializing data handler : " + e.getMessage());
  } catch (Exception e) {
    LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
    throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage(), e);
  }
  return null;
}
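Unlike the batch variant, this step creates one fact handler per iterator, lazily on the first batch, and finishes it once after the iterator is drained. A hedged sketch of what the finish(tableName, dataHandler) helper could do, assuming CarbonFactHandler exposes finish() and closeHandler() and that writer failures are rethrown as CarbonDataLoadingException (illustrative only; the real helper is more involved):

// Hypothetical sketch of finish(tableName, dataHandler); finish()/closeHandler()
// are assumptions about the handler API, not confirmed by the snippet above.
private void finish(String tableName, CarbonFactHandler dataHandler) {
  try {
    dataHandler.finish();        // flush any buffered data to the store
    dataHandler.closeHandler();  // release writer resources
  } catch (Exception e) {
    throw new CarbonDataLoadingException(
        "Error while finishing data handler for table " + tableName, e);
  }
}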
Use of org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException in project carbondata by apache.
The class ComplexFieldConverterImpl, method convert.
@Override
public void convert(CarbonRow row, BadRecordLogHolder logHolder) {
  Object object = row.getObject(index);
  // TODO Its temporary, needs refactor here.
  ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
  DataOutputStream dataOutputStream = new DataOutputStream(byteArray);
  try {
    genericDataType.writeByteArray(object, dataOutputStream);
    dataOutputStream.close();
    row.update(byteArray.toByteArray(), index);
  } catch (Exception e) {
    throw new CarbonDataLoadingException(object + "", e);
  }
}
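The converter flattens the complex (array/struct) value into a byte array through a DataOutputStream before writing it back into the CarbonRow. The same serialization pattern in isolation, using only standard Java I/O and a hypothetical length-prefixed layout (illustrative; GenericDataType.writeByteArray defines the real encoding):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

final class ComplexValueSerializerSketch {

  // Hypothetical stand-in for genericDataType.writeByteArray: length-prefixed
  // encoding of a list of string children.
  static byte[] serialize(List<String> children) throws IOException {
    ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(byteArray)) {
      out.writeInt(children.size());                  // number of children
      for (String child : children) {
        byte[] bytes = child.getBytes(StandardCharsets.UTF_8);
        out.writeInt(bytes.length);                   // child length
        out.write(bytes);                             // child payload
      }
    }
    return byteArray.toByteArray();                   // value stored back via row.update(...)
  }
}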
Use of org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException in project carbondata by apache.
The class UnsafeParallelReadMergeSorterImpl, method processRowToNextStep.
/**
 * Below method will be used to process data to next step
 */
private boolean processRowToNextStep(UnsafeSortDataRows sortDataRows, SortParameters parameters)
    throws CarbonDataLoadingException {
  if (null == sortDataRows) {
    LOGGER.info("Record Processed For table: " + parameters.getTableName());
    LOGGER.info("Number of Records was Zero");
    String logMessage = "Summary: Carbon Sort Key Step: Read: " + 0 + ": Write: " + 0;
    LOGGER.info(logMessage);
    return false;
  }
  try {
    // start sorting
    sortDataRows.startSorting();
    // check any more rows are present
    LOGGER.info("Record Processed For table: " + parameters.getTableName());
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordSortRowsStepTotalTime(parameters.getPartitionID(), System.currentTimeMillis());
    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
        .recordDictionaryValuesTotalTime(parameters.getPartitionID(), System.currentTimeMillis());
    return false;
  } catch (InterruptedException e) {
    throw new CarbonDataLoadingException(e);
  }
}
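Note that the method returns false on both paths, when no sort rows exist and after sorting has been started; only an interrupted sort is escalated as CarbonDataLoadingException. A hypothetical caller illustrating how that exception might be handled further up the sorter (names here are illustrative, not the project's actual wiring):

// Hypothetical caller: log and rethrow so the load pipeline sees a single
// exception type for sort failures. sortDataRows and parameters come from
// the surrounding sorter instance.
private void finishSortStep(UnsafeSortDataRows sortDataRows, SortParameters parameters)
    throws CarbonDataLoadingException {
  try {
    processRowToNextStep(sortDataRows, parameters);
  } catch (CarbonDataLoadingException e) {
    LOGGER.error(e, "Sort step failed for table: " + parameters.getTableName());
    throw e;
  }
}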