Use of org.apache.jena.jdbc.results.metadata.columns.SparqlColumnInfo in the Apache Jena project.
From the class SelectResultsMetadata, the method makeColumns:
/**
 * Builds per-variable column metadata for SELECT results, typing the
 * columns according to the configured JDBC compatibility level.
 *
 * @param results
 *            Result Set
 * @param rset
 *            Underlying SPARQL results
 * @return Column information
 * @throws SQLException
 *             Thrown if the column information cannot be created
 */
private static ColumnInfo[] makeColumns(JenaResultSet results, ResultSetPeekable rset) throws SQLException {
    List<String> vars = rset.getResultVars();
    ColumnInfo[] columns = new ColumnInfo[vars.size()];

    // Determine how columns should be typed from the compatibility level
    int level = JdbcCompatibility.normalizeLevel(results.getJdbcCompatibilityLevel());
    boolean typeAsStrings = JdbcCompatibility.shouldTypeColumnsAsString(level);
    boolean detectTypes = JdbcCompatibility.shouldDetectColumnTypes(level);

    // Type detection needs a row to inspect; when no data is available we
    // fall back to typing every column as a string
    Binding firstRow = null;
    if (detectTypes) {
        if (!rset.hasNext()) {
            typeAsStrings = true;
            detectTypes = false;
        } else {
            firstRow = rset.peekBinding();
        }
    }

    for (int i = 0; i < columns.length; i++) {
        String var = vars.get(i);
        if (!typeAsStrings && !detectTypes) {
            // Low compatibility, report columns as being typed as
            // JAVA_OBJECT with ARQ Node as the column class
            columns[i] = new SparqlColumnInfo(var, Types.JAVA_OBJECT, columnNullable);
            LOGGER.info("Low JDBC compatibility, column " + var + " is being typed as Node");
        } else if (typeAsStrings) {
            // Medium compatibility, report columns as being typed as
            // NVARCHAR with String as the column class
            columns[i] = new StringColumn(var, columnNullable);
            LOGGER.info("Medium JDBC compatibility, column " + var + " is being typed as String");
        } else if (detectTypes) {
            // High compatibility, detect column types from the peeked
            // first row of results
            columns[i] = JdbcCompatibility.detectColumnType(var, firstRow.get(Var.alloc(var)), true);
            LOGGER.info("High compatibility, column " + var + " was detected as being of type " + columns[i].getClassName());
        } else {
            throw new SQLFeatureNotSupportedException("Unknown JDBC compatibility level was set");
        }
    }
    return columns;
}
Use of org.apache.jena.jdbc.results.metadata.columns.SparqlColumnInfo in the Apache Jena project.
From the class TripleResultsMetadata, the method makeColumns:
/**
 * Gets the columns for CONSTRUCT/DESCRIBE results
 *
 * @param results
 *            Results
 * @param ts
 *            Underlying triples
 * @param subjLabel
 *            Label for subject column, use {@code null} to omit the subject
 *            column
 * @param predLabel
 *            Label for predicate column, use {@code null} to omit the
 *            predicate column
 * @param objLabel
 *            Label for object column, use {@code null} to omit the object
 *            column
 *
 * @return Column Information
 * @throws SQLException
 *             Thrown if the column information cannot be created or an
 *             unknown JDBC compatibility level is set
 */
private static ColumnInfo[] makeColumns(JenaResultSet results, PeekIterator<Triple> ts, String subjLabel, String predLabel, String objLabel) throws SQLException {
    int numColumns = 0;
    if (subjLabel != null)
        numColumns++;
    if (predLabel != null)
        numColumns++;
    if (objLabel != null)
        numColumns++;
    // If every label is null there are no columns at all; bail out early
    // rather than hit an ArrayIndexOutOfBoundsException on names[0] below
    if (numColumns == 0)
        return new ColumnInfo[0];
    ColumnInfo[] columns = new ColumnInfo[numColumns];
    // Figure out column names, packing the non-null labels into the first
    // numColumns slots in subject, predicate, object order
    String[] names = new String[numColumns];
    names[0] = subjLabel != null ? subjLabel : (predLabel != null ? predLabel : objLabel);
    if (numColumns > 1) {
        names[1] = subjLabel != null && predLabel != null ? predLabel : objLabel;
    }
    if (numColumns == NUM_COLUMNS) {
        names[2] = objLabel;
    }
    // Determine how columns should be typed from the compatibility level
    int level = JdbcCompatibility.normalizeLevel(results.getJdbcCompatibilityLevel());
    boolean columnsAsStrings = JdbcCompatibility.shouldTypeColumnsAsString(level);
    boolean columnsDetected = JdbcCompatibility.shouldDetectColumnTypes(level);
    Triple t = null;
    Node[] values = new Node[numColumns];
    if (columnsDetected) {
        if (ts.hasNext()) {
            // Need to peek the first Triple and grab appropriate nodes,
            // keeping the node order aligned with the names computed above
            t = ts.peek();
            if (numColumns == NUM_COLUMNS) {
                values[0] = t.getSubject();
                values[1] = t.getPredicate();
                values[2] = t.getObject();
            } else {
                values[0] = subjLabel != null ? t.getSubject() : (predLabel != null ? t.getPredicate() : t.getObject());
                if (numColumns > 1) {
                    values[1] = subjLabel != null && predLabel != null ? t.getPredicate() : t.getObject();
                }
            }
        } else {
            // If we were supposed to detect columns but there is no data
            // available then we will just fallback to typing everything as
            // strings
            columnsAsStrings = true;
            columnsDetected = false;
        }
    }
    for (int i = 0; i < columns.length; i++) {
        if (!columnsAsStrings && !columnsDetected) {
            // Low compatibility, report columns as being typed as
            // JAVA_OBJECT with ARQ Node as the column class
            columns[i] = new SparqlColumnInfo(names[i], Types.JAVA_OBJECT, columnNoNulls);
            LOGGER.info("Low JDBC compatibility, column " + names[i] + " is being typed as Node");
        } else if (columnsAsStrings) {
            // Medium compatibility, report columns as being typed as
            // NVARCHAR with String as the column class
            columns[i] = new StringColumn(names[i], columnNoNulls);
            LOGGER.info("Medium JDBC compatibility, column " + names[i] + " is being typed as String");
        } else if (columnsDetected) {
            // High compatibility, detect columns types based on first row
            // of results
            columns[i] = JdbcCompatibility.detectColumnType(names[i], values[i], false);
            LOGGER.info("High compatibility, column " + names[i] + " was detected as being of type " + columns[i].getClassName());
        } else {
            throw new SQLFeatureNotSupportedException("Unknown JDBC compatibility level was set");
        }
    }
    return columns;
}
Aggregations