Use of org.apache.hop.core.RowMetaAndData in project hop by apache.
In the class WorkflowEntryDeleteFilesTest, the method filesWithNoPath_AreNotProcessed_ArgsOfPreviousMeta:
@Test
public void filesWithNoPath_AreNotProcessed_ArgsOfPreviousMeta() throws Exception {
  action.setArgFromPrevious(true);

  Result prevMetaResult = new Result();
  List<RowMetaAndData> metaAndDataList = new ArrayList<>();
  metaAndDataList.add(constructRowMetaAndData(Const.EMPTY_STRING, null));
  metaAndDataList.add(constructRowMetaAndData(STRING_SPACES_ONLY, null));
  prevMetaResult.setRows(metaAndDataList);

  action.execute(prevMetaResult, 0);

  verify(action, never()).processFile(anyString(), anyString(), any(Workflow.class));
}
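The constructRowMetaAndData(...) helper used in both tests is not shown in this excerpt. A minimal sketch of what such a helper might look like, assuming the row simply carries two string fields (the field names "filePath" and "wildcard" are placeholders, not taken from the actual test class):

private RowMetaAndData constructRowMetaAndData(Object... data) {
  // Two string fields: the path to act on and an optional wildcard (names are assumptions).
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaString("filePath"));
  rowMeta.addValueMeta(new ValueMetaString("wildcard"));
  return new RowMetaAndData(rowMeta, data);
}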
Use of org.apache.hop.core.RowMetaAndData in project hop by apache.
In the class WorkflowEntryDeleteFilesTest, the method filesPath_AreProcessed_ArgsOfPreviousMeta:
@Test
public void filesPath_AreProcessed_ArgsOfPreviousMeta() throws Exception {
  action.setArgFromPrevious(true);

  Result prevMetaResult = new Result();
  List<RowMetaAndData> metaAndDataList = new ArrayList<>();
  metaAndDataList.add(constructRowMetaAndData(PATH_TO_FILE, null));
  prevMetaResult.setRows(metaAndDataList);

  action.execute(prevMetaResult, 0);

  verify(action, times(metaAndDataList.size())).processFile(anyString(), nullable(String.class), any(Workflow.class));
}
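On the action side, each previous-result row is read back out of its RowMetaAndData before processFile(...) is invoked. The loop below is only a hedged sketch of that idea, not the actual ActionDeleteFiles code; the field positions are assumptions, and it is assumed to run in a method that handles HopValueException:

for (RowMetaAndData resultRow : prevMetaResult.getRows()) {
  String path = resultRow.getString(0, null);
  String wildcard = resultRow.getString(1, null);
  if (!Utils.isEmpty(path)) {
    // processFile(path, wildcard, parentWorkflow) would be called here
  }
}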
Use of org.apache.hop.core.RowMetaAndData in project hop by apache.
In the class ParquetInputDialog, the method getFields:
private void getFields() {
  try {
    // Ask for a file to get metadata from...
    //
    String filename =
        BaseDialog.presentFileDialog(
            shell,
            new String[] {"*.parquet*", "*.*"},
            new String[] {"Parquet files", "All files"},
            true);
    if (filename != null) {
      FileObject fileObject = HopVfs.getFileObject(filename);
      long size = fileObject.getContent().getSize();
      InputStream inputStream = HopVfs.getInputStream(fileObject);

      // Reads the whole file into memory...
      //
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream((int) size);
      IOUtils.copy(inputStream, outputStream);
      ParquetStream inputFile = new ParquetStream(outputStream.toByteArray(), filename);

      // Empty list of fields to retrieve: we still grab the schema
      //
      ParquetReadSupport readSupport = new ParquetReadSupport(new ArrayList<>());
      ParquetReader<RowMetaAndData> reader =
          new ParquetReaderBuilder<>(readSupport, inputFile).build();

      // Read one empty row...
      //
      reader.read();

      // Now we have the schema...
      //
      MessageType schema = readSupport.getMessageType();
      IRowMeta rowMeta = new RowMeta();
      List<ColumnDescriptor> columns = schema.getColumns();
      for (ColumnDescriptor column : columns) {
        String sourceField = "";
        String[] path = column.getPath();
        if (path.length == 1) {
          sourceField = path[0];
        } else {
          for (int i = 0; i < path.length; i++) {
            if (i > 0) {
              sourceField += ".";
            }
            sourceField += path[i];
          }
        }
        PrimitiveType primitiveType = column.getPrimitiveType();
        int hopType = IValueMeta.TYPE_STRING;
        switch (primitiveType.getPrimitiveTypeName()) {
          case INT32:
          case INT64:
            hopType = IValueMeta.TYPE_INTEGER;
            break;
          case INT96:
            hopType = IValueMeta.TYPE_BINARY;
            break;
          case FLOAT:
          case DOUBLE:
            hopType = IValueMeta.TYPE_NUMBER;
            break;
          case BOOLEAN:
            hopType = IValueMeta.TYPE_BOOLEAN;
            break;
        }
        IValueMeta valueMeta = ValueMetaFactory.createValueMeta(sourceField, hopType, -1, -1);
        rowMeta.addValueMeta(valueMeta);
      }
      BaseTransformDialog.getFieldsFromPrevious(
          rowMeta, wFields, 1, new int[] {1, 2}, new int[] {3}, -1, -1, null);
    }
  } catch (Exception e) {
    LogChannel.UI.logError("Error getting parquet file fields", e);
  }
}
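The dialog only pulls the schema; it never iterates data rows. For completeness, a minimal sketch of streaming the actual rows with the same ParquetReaderBuilder/ParquetStream wiring, assuming a ParquetReadSupport built from the desired field list rather than the empty list above, and a surrounding method that handles IOException (this is not code from the dialog):

try (ParquetReader<RowMetaAndData> rowReader =
    new ParquetReaderBuilder<>(readSupport, inputFile).build()) {
  RowMetaAndData row = rowReader.read();
  while (row != null) {
    // row.getRowMeta() describes the fields, row.getData() holds the values.
    row = rowReader.read(); // read() returns null at the end of the file
  }
}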
Use of org.apache.hop.core.RowMetaAndData in project hop by apache.
In the class NormaliserTest, the method checkResults:
private void checkResults(List<RowMetaAndData> expectedOutput, List<RowMetaAndData> outputList) {
  assertEquals(expectedOutput.size(), outputList.size());
  for (int i = 0; i < outputList.size(); i++) {
    RowMetaAndData aRowMetaAndData = outputList.get(i);
    RowMetaAndData expectedRowMetaAndData = expectedOutput.get(i);

    IRowMeta rowMeta = aRowMetaAndData.getRowMeta();
    IRowMeta expectedRowMeta = expectedRowMetaAndData.getRowMeta();

    String[] fields = rowMeta.getFieldNames();
    String[] expectedFields = expectedRowMeta.getFieldNames();
    assertEquals(expectedFields.length, fields.length);
    assertArrayEquals(expectedFields, fields);

    Object[] aRow = aRowMetaAndData.getData();
    Object[] expectedRow = expectedRowMetaAndData.getData();
    assertEquals(expectedRow.length, aRow.length);
    assertArrayEquals(expectedRow, aRow);
  }
}
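checkResults() compares field names and data arrays row by row, so the expected list has to be built with the same row structure. A minimal sketch of constructing expected rows for such a comparison; the field names and values are placeholders, not taken from the actual NormaliserTest:

RowMeta expectedRowMeta = new RowMeta();
expectedRowMeta.addValueMeta(new ValueMetaString("id"));      // placeholder field
expectedRowMeta.addValueMeta(new ValueMetaString("type"));    // placeholder field
expectedRowMeta.addValueMeta(new ValueMetaInteger("value"));  // placeholder field

List<RowMetaAndData> expectedOutput = new ArrayList<>();
expectedOutput.add(new RowMetaAndData(expectedRowMeta, "1", "a", 10L));
expectedOutput.add(new RowMetaAndData(expectedRowMeta, "1", "b", 20L));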
Use of org.apache.hop.core.RowMetaAndData in project hop by apache.
In the class HopServerSequence, the method getNextValue:
public synchronized long getNextValue(IVariables variables, ILoggingObject log, long incrementValue) throws HopException {
  Database db = null;
  try {
    db = new Database(log, variables, databaseMeta);
    db.connect();

    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(variables, schemaName, tableName);
    String seqField = databaseMeta.quoteField(sequenceNameField);
    String valField = databaseMeta.quoteField(valueField);

    // Look up the current value stored for this sequence name.
    boolean update = false;
    String sql = "SELECT " + valField + " FROM " + schemaTable + " WHERE " + seqField + " = ?";
    RowMetaAndData param = new RowMetaAndData();
    param.addValue(seqField, IValueMeta.TYPE_STRING, name);
    RowMetaAndData row = db.getOneRow(sql, param.getRowMeta(), param.getData());

    long value;
    if (row != null && row.getData() != null) {
      update = true;
      Long longValue = row.getInteger(0);
      if (longValue == null) {
        value = startValue;
      } else {
        value = longValue.longValue();
      }
    } else {
      value = startValue;
    }

    long maximum = value + incrementValue;

    // Store the new maximum: update the existing row or insert a new one for this sequence.
    //
    if (update) {
      sql = "UPDATE " + schemaTable + " SET " + valField + "= ? WHERE " + seqField + "= ? ";
      param = new RowMetaAndData();
      param.addValue(valField, IValueMeta.TYPE_INTEGER, Long.valueOf(maximum));
      param.addValue(seqField, IValueMeta.TYPE_STRING, name);
    } else {
      sql = "INSERT INTO " + schemaTable + "(" + seqField + ", " + valField + ") VALUES( ? , ? )";
      param = new RowMetaAndData();
      param.addValue(seqField, IValueMeta.TYPE_STRING, name);
      param.addValue(valField, IValueMeta.TYPE_INTEGER, Long.valueOf(maximum));
    }
    db.execStatement(sql, param.getRowMeta(), param.getData());

    return value;
  } catch (Exception e) {
    throw new HopException("Unable to get next value for server sequence '" + name + "' on database '" + databaseMeta.getName() + "'", e);
  } finally {
    db.disconnect();
  }
}
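Each call reserves a block of incrementValue values in a single database round trip and returns the start of that block. A hedged usage sketch of the caller side; the serverSequence variable and the local hand-out loop are assumptions for illustration, not Hop API:

// Reserve 1000 values at once, then hand them out locally without further queries.
long start = serverSequence.getNextValue(variables, log, 1000L);
for (long next = start; next < start + 1000L; next++) {
  // use next as the next sequence value
}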