Use of org.apache.hive.hcatalog.data.HCatRecord in project hive by apache.
Class HCatInputFormatReader, method read().
@Override
public Iterator<HCatRecord> read() throws HCatException {
  HCatInputFormat inpFmt = new HCatInputFormat();
  RecordReader<WritableComparable, HCatRecord> rr;
  try {
    TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID());
    rr = inpFmt.createRecordReader(split, cntxt);
    rr.initialize(split, cntxt);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  return new HCatRecordItr(rr);
}
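HCatInputFormatReader.read() is not normally called directly; it sits behind HCatalog's data transfer API (DataTransferFactory, ReadEntity, ReaderContext). The following is a minimal sketch of driving that API end to end, assuming a reachable metastore; the database and table names are placeholders.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hive.hcatalog.data.transfer.HCatReader;
import org.apache.hive.hcatalog.data.transfer.ReadEntity;
import org.apache.hive.hcatalog.data.transfer.ReaderContext;

public class HCatReadSketch {

  public static void main(String[] args) throws Exception {
    // Master side: describe the read and obtain a serializable ReaderContext.
    ReadEntity entity = new ReadEntity.Builder()
        .withDatabase("default")   // placeholder database
        .withTable("my_table")     // placeholder table
        .build();
    Map<String, String> config = new HashMap<>();   // e.g. hive.metastore.uris
    HCatReader masterReader = DataTransferFactory.getHCatReader(entity, config);
    ReaderContext context = masterReader.prepareRead();

    // Slave side: each split is read through an HCatReader whose read()
    // is implemented by HCatInputFormatReader, as shown above.
    for (int split = 0; split < context.numSplits(); split++) {
      HCatReader splitReader = DataTransferFactory.getHCatReader(context, split);
      Iterator<HCatRecord> it = splitReader.read();
      while (it.hasNext()) {
        System.out.println(it.next().get(0));
      }
    }
  }
}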
Use of org.apache.hive.hcatalog.data.HCatRecord in project hive by apache.
Class HCatOutputFormatWriter, method write().
@Override
public void write(Iterator<HCatRecord> recordItr) throws HCatException {
  int id = sp.getId();
  setVarsInConf(id);
  HCatOutputFormat outFormat = new HCatOutputFormat();
  TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
  OutputCommitter committer = null;
  RecordWriter<WritableComparable<?>, HCatRecord> writer;
  try {
    committer = outFormat.getOutputCommitter(cntxt);
    committer.setupTask(cntxt);
    writer = outFormat.getRecordWriter(cntxt);
    while (recordItr.hasNext()) {
      HCatRecord rec = recordItr.next();
      writer.write(null, rec);
    }
    writer.close(cntxt);
    if (committer.needsTaskCommit(cntxt)) {
      committer.commitTask(cntxt);
    }
  } catch (IOException e) {
    if (null != committer) {
      try {
        committer.abortTask(cntxt);
      } catch (IOException e1) {
        throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1);
      }
    }
    throw new HCatException("Failed while writing", e);
  } catch (InterruptedException e) {
    if (null != committer) {
      try {
        committer.abortTask(cntxt);
      } catch (IOException e1) {
        throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1);
      }
    }
    throw new HCatException("Failed while writing", e);
  }
}
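Like the reader, HCatOutputFormatWriter is usually reached through the data transfer API (DataTransferFactory, WriteEntity, WriterContext) rather than instantiated directly. A minimal sketch follows, assuming a configured metastore and a placeholder table my_table with an (int, string) schema.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hive.hcatalog.data.transfer.HCatWriter;
import org.apache.hive.hcatalog.data.transfer.WriteEntity;
import org.apache.hive.hcatalog.data.transfer.WriterContext;

public class HCatWriteSketch {

  public static void main(String[] args) throws Exception {
    // Master side: describe the target table and obtain a serializable WriterContext.
    WriteEntity entity = new WriteEntity.Builder()
        .withDatabase("default")   // placeholder database
        .withTable("my_table")     // placeholder table
        .build();
    Map<String, String> config = new HashMap<>();
    HCatWriter masterWriter = DataTransferFactory.getHCatWriter(entity, config);
    WriterContext context = masterWriter.prepareWrite();

    // Build a couple of records; assumes the table has (int, string) columns.
    List<HCatRecord> records = new ArrayList<>();
    for (int i = 0; i < 2; i++) {
      DefaultHCatRecord rec = new DefaultHCatRecord(2);
      rec.set(0, i);
      rec.set(1, "value-" + i);
      records.add(rec);
    }

    // Slave side: write() delegates to HCatOutputFormatWriter.write() shown above.
    HCatWriter slaveWriter = DataTransferFactory.getHCatWriter(context);
    try {
      slaveWriter.write(records.iterator());
      masterWriter.commit(context);   // master side: make the write visible
    } catch (Exception e) {
      masterWriter.abort(context);    // master side: clean up partial output
      throw e;
    }
  }
}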
Use of org.apache.hive.hcatalog.data.HCatRecord in project beam by apache.
Class HCatalogIOTest, method testReadFromSource().
/**
 * Test of Read using SourceTestUtils.readFromSource(..).
 */
@Test
@NeedsTestData
public void testReadFromSource() throws Exception {
  ReaderContext context = getReaderContext(getConfigPropertiesAsMap(service.getHiveConf()));
  HCatalogIO.Read spec =
      HCatalogIO.read()
          .withConfigProperties(getConfigPropertiesAsMap(service.getHiveConf()))
          .withContext(context)
          .withTable(TEST_TABLE);
  List<String> records = new ArrayList<>();
  for (int i = 0; i < context.numSplits(); i++) {
    BoundedHCatalogSource source = new BoundedHCatalogSource(spec.withSplitId(i));
    for (HCatRecord record : SourceTestUtils.readFromSource(source, OPTIONS)) {
      records.add(record.get(0).toString());
    }
  }
  assertThat(records, containsInAnyOrder(getExpectedRecords(TEST_RECORDS_COUNT).toArray()));
}
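The test exercises BoundedHCatalogSource split by split; in a pipeline the same read is expressed declaratively through HCatalogIO.read(). A minimal sketch, where the metastore URI and table name are placeholders:

import java.util.HashMap;
import java.util.Map;

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.hcatalog.HCatalogIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;
import org.apache.hive.hcatalog.data.HCatRecord;

public class HCatalogIOReadSketch {

  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    Map<String, String> configProperties = new HashMap<>();
    configProperties.put("hive.metastore.uris", "thrift://metastore-host:9083"); // placeholder

    PCollection<HCatRecord> records =
        p.apply(HCatalogIO.read()
            .withConfigProperties(configProperties)
            .withDatabase("default")    // optional; defaults to "default"
            .withTable("my_table"));    // placeholder table

    p.run().waitUntilFinish();
  }
}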
Use of org.apache.hive.hcatalog.data.HCatRecord in project flink by apache.
Class HCatInputFormatBase, method nextRecord().
@Override
public T nextRecord(T record) throws IOException {
  if (!this.fetched) {
    // first record
    fetchNext();
  }
  if (!this.hasNext) {
    return null;
  }
  try {
    // get next HCatRecord
    HCatRecord v = this.recordReader.getCurrentValue();
    this.fetched = false;
    if (this.fieldNames.length > 0) {
      // return as Flink tuple
      return this.buildFlinkTuple(record, v);
    } else {
      // return as HCatRecord
      return (T) v;
    }
  } catch (InterruptedException e) {
    throw new IOException("Could not get next record.", e);
  }
}
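HCatInputFormatBase is the shared base class of Flink's flink-hcatalog connector; nextRecord() either forwards the raw HCatRecord or, when fields are projected, converts it into a Flink tuple. A minimal DataSet-API sketch, assuming the Java connector class org.apache.flink.hcatalog.java.HCatInputFormat with getFields(..)/asFlinkTuples() as described in the connector documentation, and a placeholder table with id and name columns:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.hcatalog.HCatInputFormatBase;
import org.apache.flink.hcatalog.java.HCatInputFormat;

public class FlinkHCatReadSketch {

  public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // Project two fields and ask for Flink tuples, so nextRecord() takes the
    // buildFlinkTuple(...) branch shown above instead of returning HCatRecords.
    HCatInputFormatBase<Tuple2<Integer, String>> format =
        new HCatInputFormat<Tuple2<Integer, String>>("default", "my_table") // placeholders
            .getFields("id", "name")
            .asFlinkTuples();

    DataSet<Tuple2<Integer, String>> rows = env.createInput(format);
    rows.print();
  }
}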