Use of eu.esdihumboldt.hale.common.core.io.Value in project hale by halestudio.
Example from the class ClassificationHandler, method getSourceExpressionAsCQL:
/**
 * Builds the CQL expression realizing this classification mapping:
 * {@code if_then_else(in(<source>,<keys...>), Recode(<source>,<key,value...>), <else>)}.
 * The "else" branch is derived from the not-classified-action parameter
 * (use source value, use a fixed value, or NIL).
 *
 * @see eu.esdihumboldt.hale.io.appschema.writer.internal.AbstractPropertyTransformationHandler#getSourceExpressionAsCQL()
 */
@Override
protected String getSourceExpressionAsCQL() {
	Property source = AppSchemaMappingUtils.getSourceProperty(propertyCell);
	PropertyDefinition sourceDef = source.getDefinition().getDefinition();
	Property target = AppSchemaMappingUtils.getTargetProperty(propertyCell);
	PropertyDefinition targetDef = target.getDefinition().getDefinition();
	// reuse sourceDef instead of navigating the definition chain again
	String sourceName = sourceDef.getName().getLocalPart();
	ListMultimap<String, ParameterValue> parameters = propertyCell.getTransformationParameters();
	LookupTable lookup = ClassificationMappingUtil.getClassificationLookup(parameters, new ServiceManager(ServiceManager.SCOPE_PROJECT));
	if (lookup == null) {
		log.warn("No classification specified");
		return "''";
	}
	String cqlTemplate = "if_then_else(in(%s), Recode(%s,%s), %s)";
	Map<Value, Value> valueMap = lookup.asMap();
	// single pass over entrySet (instead of keySet + get twice) collecting
	// both the Recode arguments (key,value pairs) and the in() arguments
	List<String> recodeArgsList = new ArrayList<String>(valueMap.size() * 2);
	List<String> inArgsList = new ArrayList<String>(valueMap.size() + 1);
	// first argument of in() is the source attribute name
	inArgsList.add(sourceName);
	for (Map.Entry<Value, Value> entry : valueMap.entrySet()) {
		String sourceLiteral = asCqlLiteral(sourceDef, entry.getKey().as(String.class));
		String targetLiteral = asCqlLiteral(targetDef, entry.getValue().as(String.class));
		recodeArgsList.add(sourceLiteral);
		recodeArgsList.add(targetLiteral);
		inArgsList.add(sourceLiteral);
	}
	String recodeArgs = Joiner.on(",").join(recodeArgsList);
	String inArgs = Joiner.on(",").join(inArgsList);
	// determine what to put in the "else" branch, based on
	// transformation parameters (defaults to the "use null" action)
	String notClassifiedAction = null;
	List<ParameterValue> notClassifiedParam = parameters.get(PARAMETER_NOT_CLASSIFIED_ACTION);
	if (notClassifiedParam != null && !notClassifiedParam.isEmpty()) {
		notClassifiedAction = notClassifiedParam.get(0).as(String.class);
	}
	else {
		notClassifiedAction = USE_NULL_ACTION;
	}
	String elsePart = null;
	if (USE_SOURCE_ACTION.equals(notClassifiedAction)) {
		elsePart = sourceName;
	}
	else if (notClassifiedAction.startsWith(USE_FIXED_VALUE_ACTION_PREFIX)) {
		// fixed value follows the "prefix:" part of the action string
		elsePart = asCqlLiteral(targetDef, notClassifiedAction.substring(notClassifiedAction.indexOf(':') + 1));
	}
	else if (USE_NULL_ACTION.equals(notClassifiedAction)) {
		elsePart = "Expression.NIL";
	}
	return String.format(cqlTemplate, inArgs, sourceName, recodeArgs, elsePart);
}
Use of eu.esdihumboldt.hale.common.core.io.Value in project hale by halestudio.
Example from the class FunctionExecutor, method convert:
/**
 * Convert a value according to a target property entity definition.
 *
 * @param value the value to convert
 * @param propertyEntityDefinition the target property entity definition
 * @return the converted object
 * @throws ConversionException if an error occurs during conversion
 */
private Object convert(Object value, PropertyEntityDefinition propertyEntityDefinition) throws ConversionException {
	if (value == null) {
		return null;
	}
	PropertyDefinition def = propertyEntityDefinition.getDefinition();
	// the Java type the target property is bound to
	Class<?> targetType = def.getPropertyType().getConstraint(Binding.class).getBinding();
	// special handling for Value wrappers
	if (value instanceof Value) {
		Value wrapped = (Value) value;
		// first give the value's own conversion a chance
		Object converted = wrapped.as(targetType);
		if (converted != null) {
			return converted;
		}
		// otherwise continue with the unwrapped content
		value = wrapped.getValue();
		if (value == null) {
			return null;
		}
	}
	// already of a compatible type? then use it as-is
	if (targetType.isInstance(value)) {
		return value;
	}
	// collection / list target binding
	if (Collection.class.isAssignableFrom(targetType) && targetType.isAssignableFrom(List.class)) {
		ElementType elementType = def.getPropertyType().getConstraint(ElementType.class);
		return ConversionUtil.getAsList(value, elementType.getBinding(), true);
	}
	// fall back to the generic conversion service
	return ConversionUtil.getAs(value, targetType);
}
Use of eu.esdihumboldt.hale.common.core.io.Value in project hale by halestudio.
Example from the class LookupTablePage, method readLookupTable:
// read lookup table from file (specified by provider in corresponding
// wizard); returns an empty map for unsupported file types and on read
// errors whatever could be read so far (best effort)
private Map<Value, Value> readLookupTable() {
	Map<Value, Value> lookupTable = new HashMap<Value, Value>();
	try {
		LookupTableImport provider = getWizard().getProvider();
		if (provider instanceof CSVLookupReader) {
			// CSV lookup table
			DefaultCSVLookupReader reader = new DefaultCSVLookupReader();
			lookupTable = reader.read(provider.getSource().getInput(), provider.getCharset(), provider.getParameter(CSVConstants.PARAM_SEPARATOR).as(String.class).charAt(0), provider.getParameter(CSVConstants.PARAM_QUOTE).as(String.class).charAt(0), provider.getParameter(CSVConstants.PARAM_ESCAPE).as(String.class).charAt(0), skip, keyColumn.getSelectionIndex(), valueColumn.getSelectionIndex());
		} else {
			// Excel lookup table - choose workbook type by file extension
			String file = provider.getSource().getLocation().getPath();
			int dotIndex = file.lastIndexOf('.');
			if (dotIndex < 0) {
				// no file extension -> cannot determine workbook type
				return lookupTable;
			}
			String fileExtension = file.substring(dotIndex);
			Workbook workbook;
			if (fileExtension.equalsIgnoreCase(".xls")) {
				// read xls file
				workbook = new HSSFWorkbook(provider.getSource().getInput());
			} else if (fileExtension.equalsIgnoreCase(".xlsx")) {
				// read xlsx file
				workbook = new XSSFWorkbook(provider.getSource().getInput());
			} else {
				return lookupTable;
			}
			try {
				DefaultXLSLookupTableReader reader = new DefaultXLSLookupTableReader();
				lookupTable = reader.read(workbook, skip, keyColumn.getSelectionIndex(), valueColumn.getSelectionIndex(), ignoreEmptyString.getSelection());
			} finally {
				// release the workbook resources in any case
				workbook.close();
			}
		}
	} catch (IOException e) {
		// best effort: return whatever has been read so far
		return lookupTable;
	}
	return lookupTable;
}
Use of eu.esdihumboldt.hale.common.core.io.Value in project hale by halestudio.
Example from the class LookupTablePage, method addPreview:
// add the lookup-table preview (two-column table: source value, mapped
// target value) to the given composite
private void addPreview(Composite page) {
	tableContainer = new Composite(page, SWT.NONE);
	tableContainer.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
	TableColumnLayout columnLayout = new TableColumnLayout();
	tableContainer.setLayout(columnLayout);
	tableViewer = new TableViewer(tableContainer, SWT.SINGLE | SWT.FULL_SELECTION | SWT.BORDER);
	tableViewer.setContentProvider(ArrayContentProvider.getInstance());
	Table table = tableViewer.getTable();
	table.setLinesVisible(true);
	table.setHeaderVisible(true);
	// left column: source values (map keys)
	sourceColumn = new TableViewerColumn(tableViewer, SWT.NONE);
	columnLayout.setColumnData(sourceColumn.getColumn(), new ColumnWeightData(1));
	sourceColumn.setLabelProvider(new ColumnLabelProvider() {
		@Override
		public String getText(Object element) {
			@SuppressWarnings("unchecked") Entry<Value, Value> entry = (Entry<Value, Value>) element;
			return entry.getKey().getStringRepresentation();
		}
	});
	// right column: target values (map values); unmapped keys are shown
	// with a styled "(unmapped)" placeholder
	targetColumn = new TableViewerColumn(tableViewer, SWT.NONE);
	columnLayout.setColumnData(targetColumn.getColumn(), new ColumnWeightData(1));
	targetColumn.setLabelProvider(new StyledCellLabelProvider() {
		@Override
		public void update(ViewerCell cell) {
			@SuppressWarnings("unchecked") Entry<Value, Value> entry = (Entry<Value, Value>) cell.getElement();
			if (entry.getValue() != null) {
				cell.setText(entry.getValue().getStringRepresentation());
				cell.setStyleRanges(null);
			} else {
				StyledString placeholder = new StyledString("(unmapped)", StyledString.DECORATIONS_STYLER);
				cell.setText(placeholder.getString());
				cell.setStyleRanges(placeholder.getStyleRanges());
			}
			super.update(cell);
		}
	});
}
Use of eu.esdihumboldt.hale.common.core.io.Value in project hale by halestudio.
Example from the class DefaultCSVLookupReader, method read:
/**
 * Reads a csv lookup table file. The selected columns specified by
 * parameters keyColumn and valueColumn are mapped together. Rows that do
 * not have enough columns to provide both the key and the value are
 * skipped.
 *
 * @param input the inputstream of the csv file
 * @param charset specific charset of the csv file
 * @param separator used separator char in csv file
 * @param quote used quote char in csv file
 * @param escape used escape char in csv file
 * @param skipFirst true, if first line should be skipped
 * @param keyColumn source column of the lookup table
 * @param valueColumn target column of the lookup table
 * @return lookup table as map (insertion order preserved)
 * @throws IOException if inputstream is not readable
 */
public Map<Value, Value> read(InputStream input, Charset charset, char separator, char quote, char escape, boolean skipFirst, int keyColumn, int valueColumn) throws IOException {
	Map<Value, Value> values = new LinkedHashMap<Value, Value>();
	Reader streamReader = new BufferedReader(new InputStreamReader(input, charset));
	CSVReader reader = new CSVReader(streamReader, separator, quote, escape);
	try {
		if (skipFirst) {
			// discard the header line
			reader.readNext();
		}
		// a row must be long enough to contain both selected columns;
		// the previous hard-coded ">= 2" check failed for column
		// indices beyond 1 (ArrayIndexOutOfBoundsException)
		int requiredColumns = Math.max(keyColumn, valueColumn) + 1;
		String[] nextLine;
		while ((nextLine = reader.readNext()) != null) {
			if (nextLine.length >= requiredColumns) {
				values.put(Value.of(nextLine[keyColumn]), Value.of(nextLine[valueColumn]));
			}
		}
	} finally {
		// close the reader even if readNext() throws (also closes the
		// underlying stream reader)
		reader.close();
	}
	return values;
}
Aggregations