Use of org.apache.hive.hcatalog.data.transfer.ReaderContext in project hive by apache.

The class DataReaderMaster, method main:

public static void main(String[] args) throws FileNotFoundException, IOException {
  // This config contains all the configuration that the master node wants to
  // provide to HCatalog.
  Properties externalConfigs = new Properties();
  externalConfigs.load(new FileReader(args[0]));
  Map<String, String> config = new HashMap<String, String>();
  for (Entry<Object, Object> kv : externalConfigs.entrySet()) {
    config.put((String) kv.getKey(), (String) kv.getValue());
  }
  // This code runs on the master node and obtains the necessary context.
  ReaderContext context = runsInMaster(config);
  // The master serializes the ReaderContext and makes it available to the slaves.
  ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(args[1])));
  oos.writeObject(context);
  oos.flush();
  oos.close();
}
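A slave-side counterpart would deserialize this context and pull records through an HCatReader. A minimal sketch, assuming a contextFile corresponding to args[1] above and a hypothetical per-slave index slaveNumber; it uses the ReaderContext-plus-slave-number overload of getHCatReader rather than the per-split overload shown in DataReaderSlave further down:

ObjectInputStream ois = new ObjectInputStream(new FileInputStream(contextFile));
ReaderContext context = (ReaderContext) ois.readObject();
ois.close();
// Obtain a reader for this slave's share of the splits (slaveNumber is hypothetical).
HCatReader reader = DataTransferFactory.getHCatReader(context, slaveNumber);
Iterator<HCatRecord> records = reader.read();
while (records.hasNext()) {
  System.out.println(records.next());
}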
Use of org.apache.hive.hcatalog.data.transfer.ReaderContext in project hive by apache.

The class TestReaderWriter, method test:

@Test
public void test() throws MetaException, CommandNeedRetryException, IOException, ClassNotFoundException {
  driver.run("drop table mytbl");
  driver.run("create table mytbl (a string, b int)");
  // Copy the HiveConf entries into a plain map, as required by the data-transfer API.
  Iterator<Entry<String, String>> itr = hiveConf.iterator();
  Map<String, String> map = new HashMap<String, String>();
  while (itr.hasNext()) {
    Entry<String, String> kv = itr.next();
    map.put(kv.getKey(), kv.getValue());
  }
  WriterContext cntxt = runsInMaster(map);
  File writeCntxtFile = File.createTempFile("hcat-write", "temp");
  writeCntxtFile.deleteOnExit();
  // Serialize the write context.
  ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(writeCntxtFile));
  oos.writeObject(cntxt);
  oos.flush();
  oos.close();
  // Now deserialize it, as a slave would.
  ObjectInputStream ois = new ObjectInputStream(new FileInputStream(writeCntxtFile));
  cntxt = (WriterContext) ois.readObject();
  ois.close();
  runsInSlave(cntxt);
  commit(map, true, cntxt);
  // Read side: prepare a ReaderContext on the master and round-trip it the same way.
  ReaderContext readCntxt = runsInMaster(map, false);
  File readCntxtFile = File.createTempFile("hcat-read", "temp");
  readCntxtFile.deleteOnExit();
  oos = new ObjectOutputStream(new FileOutputStream(readCntxtFile));
  oos.writeObject(readCntxt);
  oos.flush();
  oos.close();
  ois = new ObjectInputStream(new FileInputStream(readCntxtFile));
  readCntxt = (ReaderContext) ois.readObject();
  ois.close();
  for (int i = 0; i < readCntxt.numSplits(); i++) {
    runsInSlave(readCntxt, i);
  }
}
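The test relies on write-side helpers that are not shown on this page. The following is a sketch of what they plausibly look like, reconstructed from the HCatalog data-transfer API (WriteEntity, HCatWriter, and the slave-number overload of getHCatReader); the iterator of test records passed to write() is elided with an empty placeholder, and the table name mytbl matches the test above:

private WriterContext runsInMaster(Map<String, String> config) throws HCatException {
  // Master side of the write path: prepare a WriterContext for the table.
  WriteEntity entity = new WriteEntity.Builder().withTable("mytbl").build();
  HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
  return writer.prepareWrite();
}

private void runsInSlave(WriterContext context) throws HCatException {
  // Slave side of the write path: write records against the shipped context.
  HCatWriter writer = DataTransferFactory.getHCatWriter(context);
  Iterator<HCatRecord> records = Collections.<HCatRecord>emptyList().iterator(); // test data elided
  writer.write(records);
}

private void commit(Map<String, String> config, boolean status, WriterContext context) throws HCatException {
  // Back on the master: commit (or abort) the slaves' output.
  WriteEntity entity = new WriteEntity.Builder().withTable("mytbl").build();
  HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
  if (status) {
    writer.commit(context);
  } else {
    writer.abort(context);
  }
}

private void runsInSlave(ReaderContext cntxt, int slaveNum) throws HCatException {
  // Each "slave" reads its share of the splits via the slave-number overload.
  HCatReader reader = DataTransferFactory.getHCatReader(cntxt, slaveNum);
  Iterator<HCatRecord> itr = reader.read();
  while (itr.hasNext()) {
    HCatRecord record = itr.next(); // assertions on the record would go here
  }
}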
Use of org.apache.hive.hcatalog.data.transfer.ReaderContext in project hive by apache.

The class TestReaderWriter, method runsInMaster:

private ReaderContext runsInMaster(Map<String, String> config, boolean bogus) throws HCatException {
  // The unused boolean parameter only disambiguates this overload from the
  // WriterContext-returning runsInMaster(Map).
  ReadEntity entity = new ReadEntity.Builder().withTable("mytbl").build();
  HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
  ReaderContext cntxt = reader.prepareRead();
  return cntxt;
}
Use of org.apache.hive.hcatalog.data.transfer.ReaderContext in project hive by apache.

The class DataReaderMaster, method runsInMaster:

private static ReaderContext runsInMaster(Map<String, String> config) throws HCatException {
  ReadEntity.Builder builder = new ReadEntity.Builder();
  ReadEntity entity = builder.withTable(config.get("table")).build();
  HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
  ReaderContext cntxt = reader.prepareRead();
  return cntxt;
}
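The properties file handed to DataReaderMaster therefore needs at least a table entry. A minimal sketch of building the same config directly in code, assuming a table named mytbl in the default database (equivalent to a properties file containing the single line table=mytbl):

Map<String, String> config = new HashMap<String, String>();
config.put("table", "mytbl"); // consumed by config.get("table") above
ReaderContext context = runsInMaster(config);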
Use of org.apache.hive.hcatalog.data.transfer.ReaderContext in project hive by apache.

The class DataReaderSlave, method main:

public static void main(String[] args) throws IOException, ClassNotFoundException {
  // Deserialize the ReaderContext that the master wrote out.
  ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[0])));
  ReaderContext cntxt = (ReaderContext) ois.readObject();
  ois.close();
  // args[1] is a comma-separated list of split indices this slave should read.
  String[] inpSplitsToRead = args[1].split(",");
  List<InputSplit> splits = cntxt.getSplits();
  for (int i = 0; i < inpSplitsToRead.length; i++) {
    InputSplit split = splits.get(Integer.parseInt(inpSplitsToRead[i]));
    HCatReader reader = DataTransferFactory.getHCatReader(split, cntxt.getConf());
    Iterator<HCatRecord> itr = reader.read();
    File f = new File(args[2] + "-" + i);
    f.delete();
    BufferedWriter outFile = new BufferedWriter(new FileWriter(f));
    while (itr.hasNext()) {
      // Strip trailing whitespace from the record's string form before writing it out.
      String rec = itr.next().toString().replaceFirst("\\s+$", "");
      System.err.println(rec);
      outFile.write(rec + "\n");
    }
    outFile.close();
  }
}
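This slave only calls toString() on each record. Fields can also be read positionally via HCatRecord.get(int); a sketch assuming the two-column layout (a string, b int) used in the test above:

HCatRecord rec = itr.next();
String a = (String) rec.get(0);   // column "a"
Integer b = (Integer) rec.get(1); // column "b"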