Use of org.apache.hop.pipeline.transforms.injector.InjectorField in project hop by apache.
The class KafkaConsumerInputDialog, method createSubPipelineMeta.
protected PipelineMeta createSubPipelineMeta() {
  InjectorMeta injectorMeta = new InjectorMeta();
  String[] fieldNames = getFieldNames();
  int[] fieldTypes = getFieldTypes();
  for (int i = 0; i < fieldNames.length; i++) {
    InjectorField field =
        new InjectorField(fieldNames[i], ValueMetaFactory.getValueMetaName(fieldTypes[i]), "", "");
    injectorMeta.getInjectorFields().add(field);
  }
  TransformMeta recsFromStream =
      new TransformMeta("RecordsFromStream", "Get messages from Kafka", injectorMeta);
  recsFromStream.setLocation(new Point(100, 100));
  PipelineMeta pipelineMeta = new PipelineMeta();
  pipelineMeta.addTransform(recsFromStream);
  pipelineMeta.setFilename("");
  return pipelineMeta;
}
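The sub-pipeline above only declares the injector; at run time the Kafka consumer transform pushes the consumed records into it. As a rough, hypothetical sketch of how rows reach such an injector-backed pipeline (it reuses only the RowProducer calls that also appear in the AvroDecodeDialog snippet further down; the "variables" and "loggingObject" names and the two-field row layout are illustrative assumptions, not taken from the Kafka transform):

// Hypothetical caller; "variables" and "loggingObject" are assumed to be in scope.
PipelineMeta subPipelineMeta = createSubPipelineMeta();

LocalPipelineEngine pipeline = new LocalPipelineEngine(subPipelineMeta, variables, loggingObject);
pipeline.prepareExecution();

// The producer name must match the injector transform name used above.
RowProducer producer = pipeline.addRowProducer("RecordsFromStream", 0);
pipeline.startThreads();

// Push one illustrative row; the real transform derives the layout from getFieldNames()/getFieldTypes().
IRowMeta rowMeta = new RowMetaBuilder().addString("key").addString("message").build();
producer.putRow(rowMeta, new Object[] {"some-key", "some-message"});
producer.finished();

pipeline.waitUntilFinished();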
Use of org.apache.hop.pipeline.transforms.injector.InjectorField in project hop by apache.
The class InjectorDialog, method ok.
private void ok() {
  if (Utils.isEmpty(wTransformName.getText())) {
    return;
  }
  // return value
  transformName = wTransformName.getText();
  input.getInjectorFields().clear();
  for (TableItem item : wFields.getNonEmptyItems()) {
    input
        .getInjectorFields()
        .add(new InjectorField(item.getText(1), item.getText(2), item.getText(3), item.getText(4)));
  }
  dispose();
}
Use of org.apache.hop.pipeline.transforms.injector.InjectorField in project hop by apache.
The class InjectorDialog, method getData.
/**
 * Copy information from the meta-data input to the dialog fields.
 */
public void getData() {
  for (int i = 0; i < input.getInjectorFields().size(); i++) {
    InjectorField field = input.getInjectorFields().get(i);
    TableItem item = wFields.table.getItem(i);
    item.setText(1, Const.NVL(field.getName(), ""));
    item.setText(2, Const.NVL(field.getType(), ""));
    item.setText(3, Const.NVL(field.getLength(), ""));
    item.setText(4, Const.NVL(field.getPrecision(), ""));
  }
  wTransformName.selectAll();
  wTransformName.setFocus();
}
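Both dialog methods treat InjectorField as a plain holder of four string attributes: name, type, length and precision, matching table columns 1 through 4. A minimal sketch of that round trip outside the dialog, using only the constructor and getters exercised above (the sample values are illustrative):

// Values mirror what InjectorDialog.ok() would read from table columns 1..4.
InjectorField field = new InjectorField("customer_id", "Integer", "9", "0");

// The same getters getData() uses to repopulate the table.
System.out.println(field.getName());      // customer_id
System.out.println(field.getType());      // Integer
System.out.println(field.getLength());    // 9
System.out.println(field.getPrecision()); // 0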
Use of org.apache.hop.pipeline.transforms.injector.InjectorField in project hop by apache.
The class AvroDecodeDialog, method getFields.
private void getFields() {
  try {
    Map<String, Schema.Field> fieldsMap = new HashMap<>();

    // If we have a source field name we can see if it's an Avro Record type with a schema...
    //
    String fieldName = wSourceField.getText();
    if (StringUtils.isNotEmpty(fieldName)) {
      IRowMeta fields = pipelineMeta.getPrevTransformFields(variables, transformName);
      IValueMeta valueMeta = fields.searchValueMeta(fieldName);
      if (valueMeta != null && valueMeta.getType() == IValueMeta.TYPE_AVRO) {
        Schema schema = ((ValueMetaAvroRecord) valueMeta).getSchema();
        if (schema != null) {
          for (Schema.Field field : schema.getFields()) {
            fieldsMap.put(field.name(), field);
          }
        }
      }
    }
    //
    if (fieldsMap.isEmpty()) {
      String filename =
          BaseDialog.presentFileDialog(
              shell,
              new String[] {"*.avro", "*.*"},
              new String[] {"Avro files", "All files"},
              true);
      if (filename != null) {
        // Read the file
        // Grab the schema
        // Add all the fields to wFields
        //
        PipelineMeta pipelineMeta = new PipelineMeta();
        pipelineMeta.setName("Get Avro file details");

        // We'll inject the filename to minimize dependencies
        //
        InjectorMeta injector = new InjectorMeta();
        injector.getInjectorFields().add(new InjectorField("filename", "String", "500", "-1"));
        TransformMeta injectorMeta = new TransformMeta("Filename", injector);
        injectorMeta.setLocation(50, 50);
        pipelineMeta.addTransform(injectorMeta);

        // The Avro File Input transform
        //
        AvroFileInputMeta fileInput = new AvroFileInputMeta();
        fileInput.setDataFilenameField("filename");
        fileInput.setOutputFieldName("avro");
        fileInput.setRowsLimit("1");
        TransformMeta fileInputMeta = new TransformMeta("Avro", fileInput);
        fileInputMeta.setLocation(250, 50);
        pipelineMeta.addTransform(fileInputMeta);
        pipelineMeta.addPipelineHop(new PipelineHopMeta(injectorMeta, fileInputMeta));

        LocalPipelineEngine pipeline =
            new LocalPipelineEngine(pipelineMeta, variables, loggingObject);
        pipeline.setMetadataProvider(metadataProvider);
        pipeline.prepareExecution();
        pipeline.setPreview(true);

        RowProducer rowProducer = pipeline.addRowProducer("Filename", 0);
        IEngineComponent avroComponent = pipeline.findComponent("Avro", 0);
        avroComponent.addRowListener(
            new RowAdapter() {
              private boolean first = true;

              @Override
              public void rowWrittenEvent(IRowMeta rowMeta, Object[] row)
                  throws HopTransformException {
                if (first) {
                  first = false;
                  int index = rowMeta.indexOfValue("avro");
                  ValueMetaAvroRecord avroMeta = (ValueMetaAvroRecord) rowMeta.getValueMeta(index);
                  Object avroValue = row[index];
                  try {
                    GenericRecord genericRecord = avroMeta.getGenericRecord(avroValue);
                    Schema schema = genericRecord.getSchema();
                    List<Schema.Field> fields = schema.getFields();
                    for (Schema.Field field : fields) {
                      fieldsMap.put(field.name(), field);
                    }
                  } catch (Exception e) {
                    throw new HopTransformException(e);
                  }
                }
              }
            });

        pipeline.startThreads();
        rowProducer.putRow(
            new RowMetaBuilder().addString("filename").build(),
            new Object[] {variables.resolve(filename)});
        rowProducer.finished();
        pipeline.waitUntilFinished();
      }
    }

    if (fieldsMap.isEmpty()) {
      // Sorry, we can't do anything...
      return;
    }

    List<String> names = new ArrayList<>(fieldsMap.keySet());
    names.sort(Comparator.comparing(String::toLowerCase));
    for (String name : names) {
      Schema.Field field = fieldsMap.get(name);
      String typeDesc = StringUtil.initCap(field.schema().getType().name().toLowerCase());
      int hopType = AvroDecode.getStandardHopType(field);
      String hopTypeDesc = ValueMetaFactory.getValueMetaName(hopType);

      TableItem item = new TableItem(wFields.table, SWT.NONE);
      item.setText(1, Const.NVL(field.name(), ""));
      item.setText(2, typeDesc);
      item.setText(3, Const.NVL(field.name(), ""));
      item.setText(4, hopTypeDesc);
    }
    wFields.optimizeTableView();
  } catch (Exception e) {
    new ErrorDialog(shell, "Error", "Error getting fields", e);
  }
}
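The method above illustrates a reusable pattern: build a throwaway pipeline with an Injector in front of the transform you want to probe, push a single row, and capture the first row it writes. A hedged sketch of that pattern as a hypothetical helper (the method name runSingleRow is invented; "variables", "loggingObject" and "metadataProvider" are assumed to be in scope as in the dialog above, and java.util.concurrent.atomic.AtomicReference is used to hand the captured row back):

private Object[] runSingleRow(
    InjectorMeta injector, TransformMeta targetMeta, IRowMeta inputRowMeta, Object[] inputRow)
    throws HopException {
  PipelineMeta pipelineMeta = new PipelineMeta();
  pipelineMeta.setName("Single row sample");

  // Injector transform feeding the transform under inspection.
  TransformMeta injectorMeta = new TransformMeta("Input", injector);
  injectorMeta.setLocation(50, 50);
  pipelineMeta.addTransform(injectorMeta);
  targetMeta.setLocation(250, 50);
  pipelineMeta.addTransform(targetMeta);
  pipelineMeta.addPipelineHop(new PipelineHopMeta(injectorMeta, targetMeta));

  LocalPipelineEngine pipeline = new LocalPipelineEngine(pipelineMeta, variables, loggingObject);
  pipeline.setMetadataProvider(metadataProvider);
  pipeline.prepareExecution();
  pipeline.setPreview(true);

  AtomicReference<Object[]> firstRow = new AtomicReference<>();
  RowProducer rowProducer = pipeline.addRowProducer("Input", 0);
  IEngineComponent component = pipeline.findComponent(targetMeta.getName(), 0);
  component.addRowListener(
      new RowAdapter() {
        @Override
        public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) {
          // Keep only the first row written by the target transform.
          firstRow.compareAndSet(null, row);
        }
      });

  pipeline.startThreads();
  rowProducer.putRow(inputRowMeta, inputRow);
  rowProducer.finished();
  pipeline.waitUntilFinished();

  return firstRow.get();
}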
Use of org.apache.hop.pipeline.transforms.injector.InjectorField in project hop by apache.
The class PipelineMetaModifier, method handleInputDataSet.
private void handleInputDataSet(
    ILogChannel log,
    PipelineUnitTestSetLocation inputLocation,
    PipelineUnitTest unitTest,
    PipelineMeta pipelineMeta,
    TransformMeta transformMeta,
    IHopMetadataProvider metadataProvider)
    throws HopException {
  String inputSetName = inputLocation.getDataSetName();
  if (log.isDetailed()) {
    log.logDetailed(
        "Replacing transform '"
            + transformMeta.getName()
            + "' with an Injector for dataset '"
            + inputSetName
            + "'");
  }

  DataSet dataSet;
  try {
    dataSet = metadataProvider.getSerializer(DataSet.class).load(inputSetName);
  } catch (HopException e) {
    throw new HopException("Unable to load data set '" + inputSetName + "'");
  }

  // OK, this transform needs to be replaced by an Injector transform...
  // Which fields do we need to use?
  //
  final IRowMeta transformFields = DataSetConst.getTransformOutputFields(dataSet, inputLocation);
  if (log.isDetailed()) {
    log.logDetailed(
        "Input Data Set '" + inputSetName + "' Injector fields : '" + transformFields.toString() + "'");
  }

  InjectorMeta injectorMeta = new InjectorMeta();
  for (IValueMeta valueMeta : transformFields.getValueMetaList()) {
    injectorMeta
        .getInjectorFields()
        .add(
            new InjectorField(
                valueMeta.getName(),
                valueMeta.getTypeDesc(),
                Integer.toString(valueMeta.getLength()),
                Integer.toString(valueMeta.getPrecision())));
  }

  // Only the transform metadata, type...
  //
  transformMeta.setTransform(injectorMeta);
  transformMeta.setTransformPluginId(
      PluginRegistry.getInstance().getPluginId(TransformPluginType.class, injectorMeta));
}
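A small, hypothetical debug addition one could append at the end of handleInputDataSet() to confirm the replacement; it uses only accessors already exercised in these snippets:

// Hypothetical: log the injector fields derived from the data set layout.
if (log.isDetailed()) {
  for (InjectorField field : injectorMeta.getInjectorFields()) {
    log.logDetailed(
        "Unit test injector field: "
            + field.getName()
            + " ("
            + field.getType()
            + ", length "
            + field.getLength()
            + ", precision "
            + field.getPrecision()
            + ")");
  }
}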