Usage of org.apache.hyracks.algebricks.data.IPrinter in the Apache AsterixDB project:
the runTest method of the RecordWithMetaTest class.
@SuppressWarnings({ "unchecked", "rawtypes" })
public // @Test commented out due to ASTERIXDB-1881
void runTest() throws Exception {
    // Parses openbeerdb/beer.csv into (record, meta, primary-key) tuples, prints each
    // tuple as ADM into target/beer.adm, and asserts the output matches beer.txt.
    File file = new File("target/beer.adm");
    File expected = new File(getClass().getResource("/openbeerdb/beer.txt").toURI().getPath());
    try {
        FileUtils.deleteQuietly(file);
        // try-with-resources: the original closed printStream only on success, leaking the
        // file handle on a parse failure (and potentially defeating deleteQuietly below).
        try (PrintStream printStream = new PrintStream(Files.newOutputStream(file.toPath()))) {
            // create key type
            IAType[] keyTypes = { BuiltinType.ASTRING };
            String keyName = "id";
            // create record type: open record with no declared fields
            String[] recordFieldNames = {};
            IAType[] recordFieldTypes = {};
            recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
            // create the meta type
            String[] metaFieldNames = { keyName, "flags", "expiration", "cas", "rev", "vbid", "dtype" };
            IAType[] metaFieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT64,
                    BuiltinType.AINT64, BuiltinType.AINT32, BuiltinType.AINT32, BuiltinType.AINT32 };
            ARecordType metaType = new ARecordType("meta", metaFieldNames, metaFieldTypes, true);
            int valueIndex = 4;
            char delimiter = ',';
            int numOfTupleFields = 3;
            int[] pkIndexes = { 0 };
            int[] pkIndicators = { 1 };
            List<Path> paths = new ArrayList<>();
            paths.add(Paths.get(getClass().getResource("/openbeerdb/beer.csv").toURI()));
            FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
            // create input stream
            LocalFSInputStream inputStream = new LocalFSInputStream(watcher);
            // create line record reader over the CSV (header row skipped, quoted fields honored)
            Map<String, String> config = new HashMap<>();
            config.put(ExternalDataConstants.KEY_HEADER, "true");
            config.put(ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.DEFAULT_QUOTE);
            LineRecordReader lineReader = new LineRecordReader();
            lineReader.configure(inputStream, config);
            // create csv-with-metadata record converter
            CSVToRecordWithMetadataAndPKConverter recordConverter = new CSVToRecordWithMetadataAndPKConverter(
                    valueIndex, delimiter, metaType, recordType, pkIndicators, pkIndexes, keyTypes);
            // create the value parser (ADM in this case)
            ADMDataParser valueParser = new ADMDataParser(recordType, false);
            // create parser
            RecordWithMetadataParser parser = new RecordWithMetadataParser(metaType, valueParser, recordConverter);
            // create serializer/deserializer and printer factories, laid out [record, meta, pk0, ...]
            // NOTE(review): serdes is populated but never read in this method; kept for parity
            ISerializerDeserializer[] serdes = new ISerializerDeserializer[keyTypes.length + 2];
            IPrinterFactory[] printerFactories = new IPrinterFactory[keyTypes.length + 2];
            for (int i = 0; i < keyTypes.length; i++) {
                serdes[i + 2] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(keyTypes[i]);
                printerFactories[i + 2] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(keyTypes[i]);
            }
            serdes[0] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
            serdes[1] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
            printerFactories[0] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
            printerFactories[1] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(metaType);
            // create output printers
            IPrinter[] printers = new IPrinter[printerFactories.length];
            for (int i = 0; i < printerFactories.length; i++) {
                printers[i] = printerFactories[i].createPrinter();
            }
            ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
            try {
                while (lineReader.hasNext()) {
                    IRawRecord<char[]> record = lineReader.next();
                    tb.reset();
                    parser.parse(record, tb.getDataOutput());
                    tb.addFieldEndOffset();
                    parser.parseMeta(tb.getDataOutput());
                    tb.addFieldEndOffset();
                    parser.appendLastParsedPrimaryKeyToTuple(tb);
                    // print tuple
                    printTuple(tb, printers, printStream);
                }
            } finally {
                // original closed the reader only on success; always release it
                lineReader.close();
            }
        }
        Assert.assertTrue(FileUtils.contentEquals(file, expected));
    } catch (Throwable th) {
        System.err.println("TEST FAILED");
        th.printStackTrace();
        throw th;
    } finally {
        FileUtils.deleteQuietly(file);
    }
    System.err.println("TEST PASSED.");
}
Usage of org.apache.hyracks.algebricks.data.IPrinter in the Apache AsterixDB project:
the createOneOutputPushRuntime method of the StringStreamingRuntimeFactory class.
/**
 * Creates a push runtime that pipes incoming tuples to an external process.
 * <p>
 * One printer is instantiated per factory up front. The returned runtime, on
 * {@link #open()}, launches {@code command}, prints each tuple's fields to the
 * process's stdin, forwards the process's stdout through a tuple parser to the
 * downstream {@code writer}, and dumps its stderr to {@code System.err}. On
 * {@code close()} it waits for the process and fails on a non-zero exit status.
 */
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
    // Materialize one printer per field; shared by the anonymous runtime below.
    final IPrinter[] printers = new IPrinter[printerFactories.length];
    for (int i = 0; i < printerFactories.length; i++) {
        printers[i] = printerFactories[i].createPrinter();
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {
        /** Reads the script's stdout and parses it into frames for the downstream writer. */
        final class ForwardScriptOutput implements Runnable {
            private InputStream inStream;
            private ITupleParser parser;
            public ForwardScriptOutput(ITupleParser parser, InputStream inStream) {
                this.parser = parser;
                this.inStream = inStream;
            }
            @Override
            public void run() {
                try {
                    // Blocks until the process closes its stdout (i.e. exits).
                    parser.parse(inStream, writer);
                } catch (HyracksDataException e) {
                    // Runnable cannot throw checked exceptions; surface via unchecked wrapper.
                    throw new RuntimeException(e);
                } finally {
                    try {
                        inStream.close();
                    } catch (Exception e) {
                        // best-effort close of the script's stdout; failure is deliberately ignored
                    }
                }
            }
        }
        /** Copies an input stream line-by-line to a PrintStream (used for the script's stderr). */
        final class DumpInStreamToPrintStream implements Runnable {
            private BufferedReader reader;
            private PrintStream printStream;
            public DumpInStreamToPrintStream(InputStream inStream, PrintStream printStream) {
                // NOTE(review): InputStreamReader uses the platform default charset here — confirm intended
                this.reader = new BufferedReader(new InputStreamReader(inStream));
                this.printStream = printStream;
            }
            @Override
            public void run() {
                String s;
                try {
                    while ((s = reader.readLine()) != null) {
                        printStream.println(s);
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                } finally {
                    try {
                        reader.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    // NOTE(review): this closes the target stream too — System.err when wired by open()
                    printStream.close();
                }
            }
        }
        private Process process;          // the external script process
        private PrintStream ps;           // wraps the process's stdin; tuples are printed here
        private boolean first = true;     // lazily init frame accessors on first open()
        private Thread outputPipe;        // runs ForwardScriptOutput (stdout -> downstream)
        private Thread dumpStderr;        // runs DumpInStreamToPrintStream (stderr -> System.err)
        @Override
        public void open() throws HyracksDataException {
            if (first) {
                first = false;
                initAccessAppendRef(ctx);
            }
            try {
                ITupleParser parser = parserFactory.createTupleParser(ctx);
                // NOTE(review): Runtime.exec with the raw command — quoting/injection semantics
                // depend on the command's origin; confirm it is trusted/pre-tokenized.
                process = Runtime.getRuntime().exec(command);
                ps = new PrintStream(process.getOutputStream());
                ForwardScriptOutput fso = new ForwardScriptOutput(parser, process.getInputStream());
                outputPipe = new Thread(fso);
                outputPipe.start();
                DumpInStreamToPrintStream disps = new DumpInStreamToPrintStream(process.getErrorStream(), System.err);
                dumpStderr = new Thread(disps);
                dumpStderr.start();
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                for (int i = 0; i < printers.length; i++) {
                    printers[i].print(buffer.array(), tRef.getFieldStart(i), tRef.getFieldLength(i), ps);
                    // NOTE(review): the delimiter is printed after EVERY field, including the
                    // last one before the newline — confirm the script expects that.
                    ps.print(fieldDelimiter);
                    if (i == printers.length - 1) {
                        ps.print('\n');
                    }
                }
            }
        }
        @Override
        public void close() throws HyracksDataException {
            // first close the printer printing to the process (signals EOF on its stdin)
            ps.close();
            int ret = 0;
            try {
                // Wait for the process, then for both forwarding threads to drain.
                ret = process.waitFor();
                outputPipe.join();
                dumpStderr.join();
            } catch (InterruptedException e) {
                throw new HyracksDataException(e);
            }
            if (ret != 0) {
                throw new HyracksDataException("Process exit value: " + ret);
            }
            // close the following operator in the chain
            super.close();
        }
        @Override
        public void flush() throws HyracksDataException {
            // Push any buffered tuple text into the process's stdin.
            ps.flush();
        }
    };
}
Aggregations