Use of org.apache.sis.feature.builder.AttributeTypeBuilder in project sis by Apache:
the class ConvertFunction, method expectedType.
/**
 * Returns the type of values that this expression produces when a feature of the
 * given type is evaluated, or {@code null} if that type cannot be determined.
 */
@Override
public PropertyTypeBuilder expectedType(final DefaultFeatureType valueType, final FeatureTypeBuilder addTo) {
    final FeatureExpression<?, ?> wrapped = FeatureExpression.castOrCopy(expression);
    if (wrapped == null) {
        return null;
    }
    final PropertyTypeBuilder builder = wrapped.expectedType(valueType, addTo);
    // When the underlying property is an attribute, narrow its declared value class
    // to the class of values produced by this conversion.
    return (builder instanceof AttributeTypeBuilder<?>)
            ? ((AttributeTypeBuilder<?>) builder).setValueClass(getValueClass())
            : builder;
}
Use of org.apache.sis.feature.builder.AttributeTypeBuilder in project geotoolkit by Geomatys:
the class MIFManager, method deriveBaseTypes.
/**
 * Try to compute {@link #midType} and {@link #mifBaseType} from input feature type.
 * Implementation note : We'll try to remove reserved attributes (style rules), then we'll remove geometry to build mid type.
 * Finally, if input type contained a geometry, we use it to build the mif base type.
 *
 * @param ft The data type to analyze and decompose.
 * @throws IllegalArgumentException if the type declares more than one geometry attribute,
 *         or contains an attribute whose value class cannot be written in MIF-MID format.
 */
private void deriveBaseTypes(final FeatureType ft) {
FeatureTypeBuilder ftb = new FeatureTypeBuilder(ft);
ftb.setName(ft.getName().toString() + "_properties");
final Iterator<PropertyTypeBuilder> it = ftb.properties().iterator();
// The single geometry attribute found in the type (widened to Geometry.class), if any.
AttributeType geometry = null;
// True if the type declares the SIS conventional geometry property (removed for the mid type).
boolean sisGeometry = false;
while (it.hasNext()) {
final PropertyTypeBuilder next = it.next();
/* We have to determine if it's a geometry we should override (generify).
 * We'll also check if it's a supported type.
 */
if (next instanceof AttributeTypeBuilder) {
final AttributeTypeBuilder builder = (AttributeTypeBuilder) next;
final Class valueClass = builder.getValueClass();
if (Geometry.class.isAssignableFrom(valueClass)) {
if (geometry != null) {
throw new IllegalArgumentException("Only one geometry is accepted for mif-mid, but given type contains multiple : " + System.lineSeparator() + ft);
} else if (!Geometry.class.equals(valueClass)) {
// Widen the value class: MIF geometries are polymorphic.
builder.setValueClass(Geometry.class);
}
geometry = builder.build();
// Geometry property is set aside, because we'll build mid type without geometry, then use it to build mif-type with geometry.
it.remove();
} else if (MIFUtils.getColumnMIFType(valueClass) == null) {
// not supported
throw new IllegalArgumentException("MIF-MID format cannot write elements of type " + valueClass);
}
} else if (AttributeConvention.GEOMETRY_PROPERTY.equals(next.getName())) {
sisGeometry = true;
it.remove();
}
}
// If nothing remains but inherited properties, reuse parent/input types directly.
if (ftb.properties().size() < 1 && ftb.getSuperTypes().length == 1) {
midType = ft.getSuperTypes().iterator().next();
mifBaseType = ft;
} else {
midType = ftb.build();
if (geometry != null || sisGeometry) {
// Rebuild the full MIF type: mid type (plain attributes) as parent + the geometry.
ftb = new FeatureTypeBuilder();
ftb.setSuperTypes(midType);
ftb.setName(ft.getName());
if (geometry != null) {
final AttributeTypeBuilder geomBuilder = ftb.addAttribute(geometry);
if (sisGeometry) {
geomBuilder.addRole(AttributeRole.DEFAULT_GEOMETRY);
}
} else {
// NOTE(review): only the SIS conventional geometry was present; re-attach it from the
// original type — assumes AttributeConvention.GEOMETRY resolves that property. Confirm.
ftb.addProperty(ft.getProperty(AttributeConvention.GEOMETRY));
}
mifBaseType = ftb.build();
}
// NOTE(review): when neither a geometry attribute nor the SIS geometry convention is
// present, mifBaseType is left unassigned on this path — confirm callers handle that.
}
}
Use of org.apache.sis.feature.builder.AttributeTypeBuilder in project geotoolkit by Geomatys:
the class DataBaseModel, method analyzeTable.
/**
 * Analyzes a single database table and builds its meta-model: feature type builder,
 * primary key, unique-index flags and foreign-key relations (imported and exported).
 *
 * @param tableSet one metadata row describing the table; keys are {@code Table.*} constants.
 * @param cx open JDBC connection, used for sequence and index-info lookups.
 * @return the table meta-model; its {@code tableType} field holds the (unbuilt) feature type builder.
 * @throws DataStoreException if a SQL error occurs while analyzing the table.
 * @throws SQLException declared for metadata accesses; errors raised inside the analysis
 *         block are wrapped in {@code DataStoreException} instead.
 */
private TableMetaModel analyzeTable(final Map tableSet, final Connection cx) throws DataStoreException, SQLException {
final SQLDialect dialect = store.getDialect();
final String schemaName = (String) tableSet.get(Table.TABLE_SCHEM);
final String tableName = (String) tableSet.get(Table.TABLE_NAME);
final String tableType = (String) tableSet.get(Table.TABLE_TYPE);
final TableMetaModel table = new TableMetaModel(tableName, tableType);
final FeatureTypeBuilder ftb = new FeatureTypeBuilder();
try {
// explore all columns ----------------------------------------------
final Filter tableFilter = filter(Table.TABLE_SCHEM, schemaName, Table.TABLE_NAME, tableName);
final Iterator<Map> ite1 = cacheColumns.filter(tableFilter);
while (ite1.hasNext()) {
ftb.addAttribute(analyzeColumn(ite1.next(), cx));
}
// find primary key -------------------------------------------------
final List<ColumnMetaModel> cols = new ArrayList<>();
final Iterator<Map> pkIte = cachePrimaryKeys.filter(tableFilter);
while (pkIte.hasNext()) {
final Map result = pkIte.next();
final String columnName = (String) result.get(Column.COLUMN_NAME);
// look up the type ( should only be one row )
final Iterator<Map> cite = cacheColumns.filter(FF.and(tableFilter, FF.equal(FF.property(Column.COLUMN_NAME), FF.literal(columnName))));
final Map column = cite.next();
final int sqlType = ((Number) column.get(Column.DATA_TYPE)).intValue();
final String sqlTypeName = (String) column.get(Column.TYPE_NAME);
Class columnType = dialect.getJavaType(sqlType, sqlTypeName);
if (columnType == null) {
// Unknown SQL type: degrade gracefully to Object rather than failing the whole table.
store.getLogger().log(Level.WARNING, "No class for sql type {0}", sqlType);
columnType = Object.class;
}
ColumnMetaModel col = null;
final String str = (String) column.get(Column.IS_AUTOINCREMENT);
// Classify the key column: auto-increment, sequence-backed, or plain (non-incrementing).
if (Column.VALUE_YES.equalsIgnoreCase(str)) {
col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType, sqlTypeName, columnType, Type.AUTO);
} else {
final String sequenceName = dialect.getColumnSequence(cx, schemaName, tableName, columnName);
if (sequenceName != null) {
col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType, sqlTypeName, columnType, Type.SEQUENCED, sequenceName);
} else {
col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType, sqlTypeName, columnType, Type.NON_INCREMENTING);
}
}
cols.add(col);
}
// Search indexes, they provide information such as :
// - Unique indexes may indicate 1:1 relations in complex features
// - Unique indexes can be used as primary key if no primary key is defined
final boolean pkEmpty = cols.isEmpty();
final List<String> names = new ArrayList<>();
final Map<String, List<String>> uniqueIndexes = new HashMap<>();
String indexname = null;
// we can't cache this one, seems to be a bug in the driver, it won't find anything for table name like '%'
cacheIndexInfos = new CachedResultSet(metadata.getIndexInfo(null, schemaName, tableName, true, false), Index.TABLE_SCHEM, Index.TABLE_NAME, Index.COLUMN_NAME, Index.INDEX_NAME);
final Iterator<Map> indexIte = cacheIndexInfos.filter(tableFilter);
while (indexIte.hasNext()) {
final Map result = indexIte.next();
final String columnName = (String) result.get(Index.COLUMN_NAME);
final String idxName = (String) result.get(Index.INDEX_NAME);
// Group index columns by index name.
List<String> lst = uniqueIndexes.get(idxName);
if (lst == null) {
lst = new ArrayList<>();
uniqueIndexes.put(idxName, lst);
}
lst.add(columnName);
if (pkEmpty) {
// we must not mix with other potential indexes.
// Only the first unique index encountered is retained as primary-key candidate.
if (indexname == null) {
indexname = idxName;
} else if (!indexname.equals(idxName)) {
continue;
}
names.add(columnName);
}
}
// for each unique index composed of one column add a flag on the property descriptor
for (Entry<String, List<String>> entry : uniqueIndexes.entrySet()) {
final List<String> columns = entry.getValue();
if (columns.size() == 1) {
String columnName = columns.get(0);
for (PropertyTypeBuilder desc : ftb.properties()) {
if (desc.getName().tip().toString().equals(columnName)) {
final AttributeTypeBuilder atb = (AttributeTypeBuilder) desc;
atb.addCharacteristic(JDBCFeatureStore.JDBC_PROPERTY_UNIQUE).setDefaultValue(Boolean.TRUE);
}
}
}
}
if (pkEmpty && !names.isEmpty()) {
// build a primary key from unique index
final Iterator<Map> ite = cacheColumns.filter(tableFilter);
while (ite.hasNext()) {
final Map result = ite.next();
final String columnName = (String) result.get(Column.COLUMN_NAME);
if (!names.contains(columnName)) {
continue;
}
final int sqlType = ((Number) result.get(Column.DATA_TYPE)).intValue();
final String sqlTypeName = (String) result.get(Column.TYPE_NAME);
final Class columnType = dialect.getJavaType(sqlType, sqlTypeName);
final ColumnMetaModel col = new ColumnMetaModel(schemaName, tableName, columnName, sqlType, sqlTypeName, columnType, Type.NON_INCREMENTING);
cols.add(col);
// set the information
for (PropertyTypeBuilder desc : ftb.properties()) {
if (desc.getName().tip().toString().equals(columnName)) {
final AttributeTypeBuilder atb = (AttributeTypeBuilder) desc;
atb.addRole(AttributeRole.IDENTIFIER_COMPONENT);
break;
}
}
}
}
if (cols.isEmpty()) {
if (Table.VALUE_TYPE_TABLE.equals(tableType)) {
store.getLogger().log(Level.INFO, "No primary key found for {0}.", tableName);
}
}
table.key = new PrimaryKey(tableName, cols);
// mark primary key columns
for (PropertyTypeBuilder desc : ftb.properties()) {
for (ColumnMetaModel col : cols) {
if (desc.getName().tip().toString().equals(col.getName())) {
final AttributeTypeBuilder atb = (AttributeTypeBuilder) desc;
atb.addRole(AttributeRole.IDENTIFIER_COMPONENT);
break;
}
}
}
// find imported keys -----------------------------------------------
Iterator<Map> ite = cacheImportedKeys.filter(filter(ImportedKey.FKTABLE_SCHEM, schemaName, ImportedKey.FKTABLE_NAME, tableName));
while (ite.hasNext()) {
final Map result = ite.next();
// Relation name: prefer the primary key constraint name, fall back to the FK name.
String relationName = (String) result.get(ImportedKey.PK_NAME);
if (relationName == null)
relationName = (String) result.get(ImportedKey.FK_NAME);
final String localColumn = (String) result.get(ImportedKey.FKCOLUMN_NAME);
final String refSchemaName = (String) result.get(ImportedKey.PKTABLE_SCHEM);
final String refTableName = (String) result.get(ImportedKey.PKTABLE_NAME);
final String refColumnName = (String) result.get(ImportedKey.PKCOLUMN_NAME);
final int deleteRule = ((Number) result.get(ImportedKey.DELETE_RULE)).intValue();
final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
final RelationMetaModel relation = new RelationMetaModel(relationName, localColumn, refSchemaName, refTableName, refColumnName, true, deleteCascade);
table.importedKeys.add(relation);
// Queue the referenced schema for later analysis if not already visited.
if (refSchemaName != null && !visitedSchemas.contains(refSchemaName))
requieredSchemas.add(refSchemaName);
// set the information
for (PropertyTypeBuilder desc : ftb.properties()) {
if (desc.getName().tip().toString().equals(localColumn)) {
final AttributeTypeBuilder atb = (AttributeTypeBuilder) desc;
atb.addCharacteristic(JDBCFeatureStore.JDBC_PROPERTY_RELATION).setDefaultValue(relation);
break;
}
}
}
// find exported keys -----------------------------------------------
ite = cacheExportedKeys.filter(filter(ImportedKey.PKTABLE_SCHEM, schemaName, ImportedKey.PKTABLE_NAME, tableName));
while (ite.hasNext()) {
final Map result = ite.next();
// NOTE(review): the imported-keys loop above derives the relation name from PK_NAME then
// FK_NAME; here FKCOLUMN_NAME (a column name, not a constraint name) is read first —
// looks like a copy-paste slip, confirm before changing.
String relationName = (String) result.get(ExportedKey.FKCOLUMN_NAME);
if (relationName == null)
relationName = (String) result.get(ExportedKey.FK_NAME);
final String localColumn = (String) result.get(ExportedKey.PKCOLUMN_NAME);
final String refSchemaName = (String) result.get(ExportedKey.FKTABLE_SCHEM);
final String refTableName = (String) result.get(ExportedKey.FKTABLE_NAME);
final String refColumnName = (String) result.get(ExportedKey.FKCOLUMN_NAME);
// NOTE(review): reads ImportedKey.DELETE_RULE inside the exported-keys loop — presumably
// both constants share the same JDBC column name; verify.
final int deleteRule = ((Number) result.get(ImportedKey.DELETE_RULE)).intValue();
final boolean deleteCascade = DatabaseMetaData.importedKeyCascade == deleteRule;
table.exportedKeys.add(new RelationMetaModel(relationName, localColumn, refSchemaName, refTableName, refColumnName, false, deleteCascade));
if (refSchemaName != null && !visitedSchemas.contains(refSchemaName))
requieredSchemas.add(refSchemaName);
}
// find parent table if any -----------------------------------------
// (kept for reference: table-inheritance support, currently disabled)
// if(handleSuperTableMetadata == null || handleSuperTableMetadata){
// try{
// result = metadata.getSuperTables(null, schemaName, tableName);
// while (result.next()) {
// final String parentTable = result.getString(SuperTable.SUPERTABLE_NAME);
// table.parents.add(parentTable);
// }
// }catch(final SQLException ex){
// //not implemented by database
// handleSuperTableMetadata = Boolean.FALSE;
// store.getLogger().log(Level.INFO, "Database does not handle getSuperTable, feature type hierarchy will be ignored.");
// }finally{
// closeSafe(store.getLogger(),result);
// }
// }
} catch (SQLException e) {
throw new DataStoreException("Error occurred analyzing table : " + tableName, e);
}
ftb.setName(tableName);
table.tableType = ftb;
return table;
}
Use of org.apache.sis.feature.builder.AttributeTypeBuilder in project geotoolkit by Geomatys:
the class JAXBFeatureTypeReader, method reDefine.
/**
 * Creates a copy of the given property redefined with a new name, cardinality and nillability.
 *
 * @param type the property to redefine; must be an {@code AttributeType} or a {@code FeatureAssociationRole}.
 * @param name the name to assign to the redefined property.
 * @param minOcc minimum number of occurrences of the redefined property.
 * @param maxOcc maximum number of occurrences of the redefined property.
 * @param nillable whether the property accepts nil values (GML {@code nillable} flag).
 * @return the redefined property, never null.
 * @throws MismatchedFeatureException if the property cannot be rebuilt.
 * @throws UnsupportedOperationException if the property is neither an attribute nor an association role.
 */
private PropertyType reDefine(PropertyType type, GenericName name, int minOcc, int maxOcc, boolean nillable) throws MismatchedFeatureException {
    if (type instanceof AttributeType) {
        final AttributeTypeBuilder atb = new FeatureTypeBuilder().addAttribute((AttributeType) type).setName(name).setMinimumOccurs(minOcc).setMaximumOccurs(maxOcc);
        if (nillable) {
            // Reuse the "nillable" characteristic if the copied attribute already declares it,
            // otherwise add it, then force its default value to true.
            CharacteristicTypeBuilder cb = atb.getCharacteristic(GMLConvention.NILLABLE_PROPERTY.toString());
            if (cb == null) {
                cb = atb.addCharacteristic(GMLConvention.NILLABLE_CHARACTERISTIC);
            }
            cb.setDefaultValue(true);
        }
        return atb.build();
    } else if (type instanceof FeatureAssociationRole) {
        // Typed map instead of the previous raw Map: the role constructor takes Map<String,?>.
        final Map<String, GenericName> properties = Collections.singletonMap("name", name);
        try {
            FeatureType valueType = ((FeatureAssociationRole) type).getValueType();
            if (nillable) {
                final FeatureTypeBuilder ftb = new FeatureTypeBuilder(valueType);
                // NOTE(review): adds the nillable characteristic as an *attribute* of the value
                // type rather than as a characteristic — looks intentional for GML encoding; confirm.
                ftb.addAttribute(GMLConvention.NILLABLE_CHARACTERISTIC);
                valueType = ftb.build();
            }
            return new DefaultAssociationRole(properties, valueType, minOcc, maxOcc);
        } catch (IllegalStateException ex) {
            // Value type not resolved yet (cyclic or deferred definition): keep a named reference.
            return new DefaultAssociationRole(properties, Features.getValueTypeName(type), minOcc, maxOcc);
        }
    } else {
        throw new UnsupportedOperationException("Unexpected type " + type.getClass());
    }
}
Use of org.apache.sis.feature.builder.AttributeTypeBuilder in project geotoolkit by Geomatys:
the class JAXBFeatureTypeReader, method toProperty.
/**
 * Converts an XSD simple type declaration into a property type.
 * Handles, in order: restrictions (base type + facets), unions (first member only),
 * list types (item type only), and primitive XSD types referenced by name.
 *
 * @param simpleType the XSD simple type to convert.
 * @param stack build stack used to resolve type references and detect cycles.
 * @return the converted property type, or {@code null} if the type cannot be mapped.
 * @throws MismatchedFeatureException if a referenced type cannot be found.
 */
private PropertyType toProperty(SimpleType simpleType, BuildStack stack) throws MismatchedFeatureException {
    // Case 1: restriction — resolve the base attribute type, then apply supported facets.
    final Restriction restriction = simpleType.getRestriction();
    if (restriction != null) {
        QName base = restriction.getBase();
        AttributeType baseType = null;
        if (base != null) {
            baseType = (AttributeType) resolveType(base, stack);
        }
        // An inline simple type overrides the named base, if both are present.
        final LocalSimpleType localSimpleType = restriction.getSimpleType();
        if (localSimpleType != null) {
            baseType = (AttributeType) toProperty(localSimpleType, stack);
        }
        if (baseType != null) {
            final AttributeTypeBuilder atb = new FeatureTypeBuilder().addAttribute(baseType);
            for (Object facet : restriction.getFacets()) {
                if (facet instanceof JAXBElement) {
                    String name = ((JAXBElement) facet).getName().getLocalPart();
                    facet = ((JAXBElement) facet).getValue();
                    if (facet instanceof NumFacet) {
                        final NumFacet nf = (NumFacet) facet;
                        // parseInt avoids the needless boxing of Integer.valueOf.
                        final int length = Integer.parseInt(nf.getValue());
                        if ("maxLength".equalsIgnoreCase(name)) {
                            atb.setMaximalLength(length);
                        }
                    }
                } else if (facet instanceof Pattern) {
                    // TODO: map the XSD pattern facet onto a constraint.
                }
            }
            return atb.build();
        }
    }
    // Case 2: union. TODO union can be a collection of anything :
    // collection ? array ? Object.class ? most exact type ?
    // For now only the first member type is used.
    final Union union = simpleType.getUnion();
    if (union != null) {
        if (union.getMemberTypes() != null && !union.getMemberTypes().isEmpty()) {
            final QName name = union.getMemberTypes().get(0);
            final SimpleType refType = xsdContext.findSimpleType(name);
            if (refType == null) {
                throw new MismatchedFeatureException("Could not find type : " + name);
            }
            return toProperty(refType, stack);
        } else if (union.getSimpleType() != null && !union.getSimpleType().isEmpty()) {
            final LocalSimpleType st = union.getSimpleType().get(0);
            return toProperty(st, stack);
        }
    }
    // Case 3: list type. TODO: only the item type is mapped, not the list itself.
    final org.geotoolkit.xsd.xml.v2001.List list = simpleType.getList();
    if (list != null) {
        final QName subTypeName = list.getItemType();
        if (subTypeName != null) {
            final SimpleType refType = xsdContext.findSimpleType(subTypeName);
            if (refType != null) {
                return toProperty(refType, stack);
            }
            return resolveType(subTypeName, stack);
        }
        final LocalSimpleType subtype = list.getSimpleType();
        if (subtype != null) {
            // BUG FIX: previously recursed on `simpleType` (the same object), which re-entered
            // this exact branch and looped forever. Recurse on the anonymous item type instead.
            return toProperty(subtype, stack);
        }
    }
    // Case 4: primitive XSD type referenced by its local name.
    if (Utils.existPrimitiveType(simpleType.getName())) {
        return resolveType(new QName(null, simpleType.getName()), stack);
    } else {
        return null;
    }
}
Aggregations