Use of org.apache.hive.hcatalog.data.transfer.HCatReader in project Hive by Apache.
Class TestReaderWriter, method runsInSlave:
private void runsInSlave(ReaderContext cntxt, int slaveNum) throws HCatException {
  // Runs on a slave: obtains a reader for the given split number and checks that
  // every record read back matches the record that was originally written.
  HCatReader reader = DataTransferFactory.getHCatReader(cntxt, slaveNum);
  Iterator<HCatRecord> itr = reader.read();
  int i = 1;
  while (itr.hasNext()) {
    HCatRecord read = itr.next();
    HCatRecord written = getRecord(i++);
    // Argh, HCatRecord doesn't implement equals()
    Assert.assertTrue("Read: " + read.get(0) + " Written: " + written.get(0),
        written.get(0).equals(read.get(0)));
    Assert.assertTrue("Read: " + read.get(1) + " Written: " + written.get(1),
        written.get(1).equals(read.get(1)));
    Assert.assertEquals(2, read.size());
  }
  //Assert.assertFalse(itr.hasNext());
}
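The helper getRecord(i) (not shown here) rebuilds the i-th record the test originally wrote, which is why the loop can compare field by field. The slave-side read above is driven from the master by looping over the splits described by the ReaderContext; a minimal sketch of that driver, assuming a ReaderContext that reports its split count via numSplits() as in recent HCatalog releases:

ReaderContext readCntxt = runsInMaster(config, false);
// Simulate one slave per split; in a real deployment each call would run on a different node.
for (int i = 0; i < readCntxt.numSplits(); i++) {
  runsInSlave(readCntxt, i);
}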
Use of org.apache.hive.hcatalog.data.transfer.HCatReader in project Hive by Apache.
Class TestReaderWriter, method runsInMaster:
private ReaderContext runsInMaster(Map<String, String> config, boolean bogus) throws HCatException {
  // Runs on the master: builds a ReadEntity for the test table and prepares the read,
  // producing a ReaderContext that can be handed to the slaves.
  ReadEntity entity = new ReadEntity.Builder().withTable("mytbl").build();
  HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
  ReaderContext cntxt = reader.prepareRead();
  return cntxt;
}
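The read path above has a writer-side counterpart in the same test. A minimal sketch of what that master-side write preparation looks like, using the corresponding HCatWriter API; the method name runsInMasterForWrite is only illustrative:

private WriterContext runsInMasterForWrite(Map<String, String> config) throws HCatException {
  // Counterpart of runsInMaster: prepare a write instead of a read.
  WriteEntity entity = new WriteEntity.Builder().withTable("mytbl").build();
  HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
  return writer.prepareWrite();
}

The returned WriterContext is handed to the slaves, each of which obtains a writer via DataTransferFactory.getHCatWriter(writerContext) and calls write(Iterator<HCatRecord>); the master then finishes with commit(writerContext), or abort(writerContext) on failure.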
Use of org.apache.hive.hcatalog.data.transfer.HCatReader in project Hive by Apache.
Class DataReaderMaster, method runsInMaster:
private static ReaderContext runsInMaster(Map<String, String> config) throws HCatException {
  // Runs on the master: the table name comes from the "table" key of the external config.
  ReadEntity.Builder builder = new ReadEntity.Builder();
  ReadEntity entity = builder.withTable(config.get("table")).build();
  HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
  ReaderContext cntxt = reader.prepareRead();
  return cntxt;
}
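DataReaderSlave (below) reconstructs a ReaderContext with an ObjectInputStream, so the master side is expected to serialize the context it obtains here and ship it to the slave processes. A minimal sketch of that hand-off, with the output path taken from a hypothetical second command-line argument:

ReaderContext context = runsInMaster(config);
// Serialize the ReaderContext so the slaves can deserialize it and read their splits.
ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(args[1])));
oos.writeObject(context);
oos.flush();
oos.close();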
Use of org.apache.hive.hcatalog.data.transfer.HCatReader in project Hive by Apache.
Class DataReaderSlave, method main:
public static void main(String[] args) throws IOException, ClassNotFoundException {
  // Deserialize the ReaderContext that the master wrote out.
  ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[0])));
  ReaderContext cntxt = (ReaderContext) ois.readObject();
  ois.close();
  // args[1] is a comma-separated list of split indices this slave should read.
  String[] inpSplitsToRead = args[1].split(",");
  List<InputSplit> splits = cntxt.getSplits();
  for (int i = 0; i < inpSplitsToRead.length; i++) {
    InputSplit split = splits.get(Integer.parseInt(inpSplitsToRead[i]));
    HCatReader reader = DataTransferFactory.getHCatReader(split, cntxt.getConf());
    Iterator<HCatRecord> itr = reader.read();
    // One output file per split, suffixed with the loop index.
    File f = new File(args[2] + "-" + i);
    f.delete();
    BufferedWriter outFile = new BufferedWriter(new FileWriter(f));
    while (itr.hasNext()) {
      String rec = itr.next().toString().replaceFirst("\\s+$", "");
      System.err.println(rec);
      outFile.write(rec + "\n");
    }
    outFile.close();
  }
}
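As written, the program expects three arguments: args[0] is the file holding the serialized ReaderContext produced by the master, args[1] is a comma-separated list of split indices to read, and args[2] is a prefix for the per-split output files. A hypothetical invocation (class path omitted) such as

java DataReaderSlave /tmp/reader.ctx 0,1 /tmp/out

would read splits 0 and 1 and write them to /tmp/out-0 and /tmp/out-1, one line per HCatRecord.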