Use of org.apache.cayenne.access.jdbc.ColumnDescriptor in the Apache Cayenne project.
Class PrefetchProcessorJointNode, method buildRowMapping.
/**
 * Configures row columns mapping for this node entity.
 * <p>
 * Builds the {@code columns} array of {@link ColumnDescriptor}s that maps
 * flat joint result-row keys (optionally qualified with a DB path prefix)
 * to this node's own target column names, and computes {@code rowCapacity}
 * used to size per-object DataRows extracted from the flat row.
 */
private void buildRowMapping() {
    // TreeMap keyed by target column name: dedupes columns appended from
    // several sources (incoming join, attributes, PK, discriminators) and
    // yields a deterministic, sorted column order
    final Map<String, ColumnDescriptor> targetSource = new TreeMap<>();

    // build a DB path .. find parent node that terminates the joint group...
    PrefetchTreeNode jointRoot = this;
    while (jointRoot.getParent() != null && !jointRoot.isDisjointPrefetch()
            && !jointRoot.isDisjointByIdPrefetch()) {
        jointRoot = jointRoot.getParent();
    }

    final String prefix;
    if (jointRoot != this) {
        // this node is nested under the joint root, so flat-row keys for its
        // columns carry the DB path from the root as a prefix
        Expression objectPath = ExpressionFactory.exp(getPath(jointRoot));
        ASTPath translated = (ASTPath) ((PrefetchProcessorNode) jointRoot)
                .getResolver().getEntity().translateToDbPath(objectPath);

        // make sure we do not include "db:" prefix
        prefix = translated.getOperand(0) + ".";
    } else {
        prefix = "";
    }

    // add the incoming join's target columns so result rows can be connected
    // back to the parent; skipped for phantom parents and flattened
    // relationships (which span more than one DbRelationship)
    if (getParent() != null && !getParent().isPhantom() && getIncoming() != null
            && !getIncoming().getRelationship().isFlattened()) {
        DbRelationship r = getIncoming().getRelationship().getDbRelationships().get(0);
        for (final DbJoin join : r.getJoins()) {
            appendColumn(targetSource, join.getTargetName(), prefix + join.getTargetName());
        }
    }

    ClassDescriptor descriptor = resolver.getDescriptor();
    descriptor.visitAllProperties(new PropertyVisitor() {

        public boolean visitAttribute(AttributeProperty property) {
            // plain attribute: map its DB column path directly
            String target = property.getAttribute().getDbAttributePath();
            appendColumn(targetSource, target, prefix + target);
            return true;
        }

        public boolean visitToMany(ToManyProperty property) {
            return visitRelationship(property);
        }

        public boolean visitToOne(ToOneProperty property) {
            return visitRelationship(property);
        }

        private boolean visitRelationship(ArcProperty arc) {
            // include the source (FK) columns of the relationship's first
            // DB segment
            DbRelationship dbRel = arc.getRelationship().getDbRelationships().get(0);
            for (DbAttribute attribute : dbRel.getSourceAttributes()) {
                String target = attribute.getName();
                appendColumn(targetSource, target, prefix + target);
            }
            return true;
        }
    });

    // append id columns ... (some may have been appended already via relationships)
    for (String pkName : descriptor.getEntity().getPrimaryKeyNames()) {
        appendColumn(targetSource, pkName, prefix + pkName);
    }

    // append inheritance discriminator columns...
    for (ObjAttribute column : descriptor.getDiscriminatorColumns()) {
        String target = column.getDbAttributePath();
        appendColumn(targetSource, target, prefix + target);
    }

    int size = targetSource.size();
    // capacity for a hash-backed DataRow at the default 0.75 load factor,
    // so 'size' entries fit without rehashing
    this.rowCapacity = (int) Math.ceil(size / 0.75);
    this.columns = new ColumnDescriptor[size];
    targetSource.values().toArray(columns);
}
Use of org.apache.cayenne.access.jdbc.ColumnDescriptor in the Apache Cayenne project.
Class PrefetchProcessorJointNode, method appendColumn.
/**
 * Returns the column descriptor registered in the map under the given name,
 * creating and registering a new one if none exists yet.
 *
 * @param map   target-name-to-descriptor mapping being built
 * @param name  DB column name used both as the map key and the descriptor name
 * @param label data row key for the column (may carry a DB path prefix)
 * @return the existing or newly created descriptor for {@code name}
 */
private ColumnDescriptor appendColumn(Map<String, ColumnDescriptor> map, String name, String label) {
    // computeIfAbsent replaces the manual get/null-check/put sequence and
    // avoids a second map lookup on insertion
    return map.computeIfAbsent(name, key -> {
        ColumnDescriptor column = new ColumnDescriptor();
        column.setName(key);
        column.setDataRowKey(label);
        return column;
    });
}
Use of org.apache.cayenne.access.jdbc.ColumnDescriptor in the Apache Cayenne project.
Class PrefetchProcessorJointNode, method rowFromFlatRow.
/**
 * Extracts this node's subset of columns from a flat joint row, returning
 * them as a DataRow keyed by this entity's own column names.
 */
DataRow rowFromFlatRow(DataRow flatRow) {
    DataRow targetRow = new DataRow(rowCapacity);

    // copy the relevant values, translating flat-row keys to target keys
    for (int i = 0; i < columns.length; i++) {
        ColumnDescriptor column = columns[i];
        targetRow.put(column.getName(), flatRow.get(column.getDataRowKey()));
    }

    // since JDBC row reader won't inject JOINED entity name, we have to
    // detect it here...
    ObjEntity matchedEntity = resolver.getDescriptor()
            .getEntityInheritanceTree()
            .entityMatchingRow(targetRow);
    targetRow.setEntityName(matchedEntity.getName());

    return targetRow;
}
Use of org.apache.cayenne.access.jdbc.ColumnDescriptor in the Apache Cayenne project.
Class CryptoRowReaderFactoryDecorator, method encryptedRowDescriptor.
/**
 * Builds a RowDescriptor whose converters force encrypted columns to be read
 * as raw binary or character data, even when the mapped plain Java type is
 * not a byte[] or String.
 */
protected RowDescriptor encryptedRowDescriptor(RowDescriptor descriptor, ExtendedTypeMap typeMap) {
    ColumnDescriptor[] columns = descriptor.getColumns();
    ExtendedType[] plainConverters = descriptor.getConverters();
    ExtendedType[] adjustedConverters = new ExtendedType[columns.length];

    for (int i = 0; i < columns.length; i++) {
        ExtendedType converter = plainConverters[i];
        DbAttribute dbAttribute = columns[i].getAttribute();

        if (dbAttribute != null && columnMapper.isEncrypted(dbAttribute)) {
            // only char or binary columns can store encrypted data
            int jdbcType = dbAttribute.getType();
            if (TypesMapping.isBinary(jdbcType)) {
                converter = typeMap.getRegisteredType(byte[].class);
            } else if (TypesMapping.isCharacter(jdbcType)) {
                converter = typeMap.getRegisteredType(String.class);
            }
            // else - warning?
        }

        adjustedConverters[i] = converter;
    }

    return new RowDescriptor(columns, adjustedConverters);
}
Use of org.apache.cayenne.access.jdbc.ColumnDescriptor in the Apache Cayenne project.
Class VelocitySQLTemplateProcessor, method processTemplate.
/**
 * Renders a Velocity SQL template, collecting parameter bindings and result
 * column descriptors that template directives append during rendering, and
 * wraps everything in a SQLStatement.
 */
SQLStatement processTemplate(String template, SimpleNode parsedTemplate, Map<String, Object> parameters) {
    // expose collecting lists and the rendering helper via the context;
    // template directives populate the lists as the template is evaluated
    List<ParameterBinding> bindings = new ArrayList<>();
    List<ColumnDescriptor> resultColumns = new ArrayList<>();
    parameters.put(BINDINGS_LIST_KEY, bindings);
    parameters.put(RESULT_COLUMNS_LIST_KEY, resultColumns);
    parameters.put(HELPER_KEY, renderingUtils);

    String sql;
    try {
        sql = buildStatement(new VelocityContext(parameters), template, parsedTemplate);
    } catch (Exception e) {
        throw new CayenneRuntimeException("Error processing Velocity template", e);
    }

    return new SQLStatement(
            sql,
            resultColumns.toArray(new ColumnDescriptor[0]),
            bindings.toArray(new ParameterBinding[0]));
}
Aggregations