use of org.apache.hadoop.hive.ql.io.HiveOutputFormat in project hive by apache.
the class Utilities method createDummyFileForEmptyPartition.
@SuppressWarnings("rawtypes")
private static Path createDummyFileForEmptyPartition(Path path, JobConf job, MapWork work, Path hiveScratchDir) throws Exception {
  String strPath = path.toString();
  // The input file does not exist; replace it with an empty file.
  PartitionDesc partDesc = work.getPathToPartitionInfo().get(path);
  if (partDesc.getTableDesc().isNonNative()) {
    // If this isn't a Hive table, we can't create an empty file for it.
    return path;
  }
  Properties props = SerDeUtils.createOverlayedProperties(partDesc.getTableDesc().getProperties(), partDesc.getProperties());
  HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, partDesc);
  boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class;
  Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, oneRow);
  if (LOG.isInfoEnabled()) {
    LOG.info("Changed input file " + strPath + " to empty file " + newPath + " (" + oneRow + ")");
  }
  // Update the work's alias and partition mappings to point at the new path.
  work.addPathToAlias(newPath, work.getPathToAliases().get(path));
  work.removePathToAlias(path);
  work.removePathToPartitionInfo(path);
  work.addPathToPartitionInfo(newPath, partDesc);
  return newPath;
}
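For orientation, a minimal standalone sketch of the "empty file" trick this method builds on: obtain a Hive record writer on a scratch path and close it without writing any rows. The output format, scratch path, and properties below are illustrative assumptions, not values taken from the snippet above.

import java.util.Properties;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;

public class EmptyFileSketch {
    public static void main(String[] args) throws Exception {
        JobConf job = new JobConf();
        Path scratch = new Path("/tmp/hive-scratch/empty-000000"); // hypothetical scratch path
        HiveOutputFormat<Text, Text> outFormat = new HiveIgnoreKeyTextOutputFormat<>();
        FileSinkOperator.RecordWriter writer = outFormat.getHiveRecordWriter(
                job, scratch, Text.class, false /* isCompressed */, new Properties(), Reporter.NULL);
        writer.close(false); // zero rows written: the partition now has a readable empty file
    }
}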
use of org.apache.hadoop.hive.ql.io.HiveOutputFormat in project presto by prestodb.
the class HiveWriteUtils method createRecordWriter.
public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName) {
    try {
        boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT);
        Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
        return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL);
    } catch (IOException | ReflectiveOperationException e) {
        throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
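A hedged usage sketch for this overload; the target path, schema properties, and row payload are assumptions, and HiveWriteUtils is imported from its prestodb package:

import java.util.Properties;
import com.facebook.presto.hive.HiveWriteUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class CreateRecordWriterExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("columns", "id,name");             // illustrative schema
        props.setProperty("columns.types", "int:string");
        RecordWriter writer = HiveWriteUtils.createRecordWriter(
                new Path("/tmp/example-out"),                // hypothetical target
                new JobConf(),
                props,
                HiveIgnoreKeyTextOutputFormat.class.getName());
        writer.write(new Text("1\u0001alice"));              // ^A-delimited text row
        writer.close(false);                                 // false = commit, not abort
    }
}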
use of org.apache.hadoop.hive.ql.io.HiveOutputFormat in project presto by prestodb.
the class HiveWriteUtils method createRecordWriter.
public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName, ConnectorSession session) {
    try {
        boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT);
        if (outputFormatName.equals(RCFileOutputFormat.class.getName())) {
            return createRcFileWriter(target, conf, properties, compress);
        }
        if (outputFormatName.equals(MapredParquetOutputFormat.class.getName())) {
            return createParquetWriter(target, conf, properties, compress, session);
        }
        Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
        return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL);
    } catch (IOException | ReflectiveOperationException e) {
        throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
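Unlike the reflective fallback, this session-aware overload short-circuits to Presto-native RCFile and Parquet writers. A sketch of calling it with the Parquet format name; since a ConnectorSession is supplied by the connector runtime, the sketch takes one as a parameter, and the path and schema are assumptions:

import java.util.Properties;
import com.facebook.presto.hive.HiveWriteUtils;
import com.facebook.presto.spi.ConnectorSession;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.parquet.write.MapredParquetOutputFormat;
import org.apache.hadoop.mapred.JobConf;

public class ParquetWriterSketch {
    static RecordWriter openParquetWriter(ConnectorSession session) {
        Properties props = new Properties();
        props.setProperty("columns", "id,name");              // illustrative schema
        props.setProperty("columns.types", "int:string");
        return HiveWriteUtils.createRecordWriter(
                new Path("/tmp/parquet-out"),                 // hypothetical target
                new JobConf(),
                props,
                MapredParquetOutputFormat.class.getName(),    // routes to createParquetWriter above
                session);
    }
}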
use of org.apache.hadoop.hive.ql.io.HiveOutputFormat in project flink by apache.
the class HiveShimV100 method getHiveRecordWriter.
@Override
public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf, Class outputFormatClz, Class<? extends Writable> outValClz, boolean isCompressed, Properties tableProps, Path outPath) {
    try {
        Class utilClass = HiveFileFormatUtils.class;
        HiveOutputFormat outputFormat = (HiveOutputFormat) outputFormatClz.newInstance();
        Method utilMethod = utilClass.getDeclaredMethod("getRecordWriter", JobConf.class, HiveOutputFormat.class, Class.class, boolean.class, Properties.class, Path.class, Reporter.class);
        return (FileSinkOperator.RecordWriter) utilMethod.invoke(null, jobConf, outputFormat, outValClz, isCompressed, tableProps, outPath, Reporter.NULL);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive RecordWriter", e);
    }
}
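A sketch of driving the shim directly; it assumes a Hive 1.x hive-exec on the classpath (the reflective lookup targets that version's HiveFileFormatUtils.getRecordWriter), and the output format, value class, and path are illustrative:

import java.util.Properties;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class ShimWriterExample {
    public static void main(String[] args) throws Exception {
        FileSinkOperator.RecordWriter writer = new HiveShimV100().getHiveRecordWriter(
                new JobConf(),
                HiveIgnoreKeyTextOutputFormat.class, // instantiated reflectively by the shim
                Text.class,                          // value class written to the file
                false,                               // isCompressed
                new Properties(),                    // table properties
                new Path("/tmp/flink-hive-out"));    // hypothetical output path
        writer.write(new Text("hello"));
        writer.close(false);
    }
}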
use of org.apache.hadoop.hive.ql.io.HiveOutputFormat in project hive by apache.
the class Utilities method createDummyFileForEmptyPartition.
@SuppressWarnings("rawtypes")
private static Path createDummyFileForEmptyPartition(Path path, JobConf job, PartitionDesc partDesc, Path hiveScratchDir) throws Exception {
  String strPath = path.toString();
  // The input file does not exist; replace it with an empty file.
  if (partDesc.getTableDesc().isNonNative()) {
    // If this isn't a Hive table, we can't create an empty file for it.
    return path;
  }
  Properties props = SerDeUtils.createOverlayedProperties(partDesc.getTableDesc().getProperties(), partDesc.getProperties());
  HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, partDesc);
  boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class;
  Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, oneRow);
  LOG.info("Changed input file {} to empty file {} ({})", strPath, newPath, oneRow);
  return newPath;
}
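Compared with the MapWork-based version above, this variant takes the PartitionDesc directly and leaves the alias/path bookkeeping to the caller; it also uses SLF4J's parameterized logging, which makes the isInfoEnabled() guard unnecessary. The oneRow flag still marks partitions read through OneNullRowInputFormat, for which the replacement file is written with a single row rather than zero rows so metadata-only queries keep producing output.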