Uses of com.teradata.jaqy.schema.FullColumnInfo in project jaqy by Teradata.
Class PipeImporter, method setNull:
@Override
public void setNull(JaqyPreparedStatement stmt, int column, ParameterInfo paramInfo) throws Exception {
    // If possible, we use the source type info since
    // 1. we need to match the source type, and
    // 2. paramInfo may be a dummy (as in the case of MySQL).
    FullColumnInfo info = getColumnInfo(column);
    stmt.setNull(column, info.type, info.typeName);
}
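JaqyPreparedStatement here presumably wraps a java.sql.PreparedStatement, so the call above maps to the standard three-argument setNull. Below is a minimal plain-JDBC sketch of the same pattern, assuming the source column's type information is read straight from a ResultSetMetaData; the class and method names are illustrative, not part of jaqy.

import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

// Sketch only: binds NULL using the source column's JDBC type and type name,
// mirroring what FullColumnInfo.type and FullColumnInfo.typeName carry above.
public class NullBindingSketch {
    public static void bindNull(PreparedStatement stmt, int column, ResultSetMetaData sourceMeta)
        throws SQLException {
        int sqlType = sourceMeta.getColumnType(column);          // e.g. java.sql.Types.VARCHAR
        String typeName = sourceMeta.getColumnTypeName(column);  // driver-specific type name
        // Some drivers need the type name for user-defined or REF types; others ignore it.
        stmt.setNull(column, sqlType, typeName);
    }
}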
Class ResultSetMetaDataUtils, method getColumnInfo:
public static FullColumnInfo getColumnInfo(ResultSetMetaData meta, int column, JaqyHelper helper) throws SQLException {
    FullColumnInfo columnInfo = new FullColumnInfo();
    columnInfo.autoIncrement = meta.isAutoIncrement(column);
    columnInfo.caseSensitive = meta.isCaseSensitive(column);
    columnInfo.searchable = meta.isSearchable(column);
    columnInfo.currency = meta.isCurrency(column);
    columnInfo.nullable = meta.isNullable(column);
    columnInfo.signed = meta.isSigned(column);
    columnInfo.displaySize = meta.getColumnDisplaySize(column);
    columnInfo.label = meta.getColumnLabel(column);
    columnInfo.name = meta.getColumnName(column);
    columnInfo.schemaName = meta.getSchemaName(column);
    columnInfo.precision = meta.getPrecision(column);
    columnInfo.scale = meta.getScale(column);
    columnInfo.tableName = meta.getTableName(column);
    columnInfo.catalogName = meta.getCatalogName(column);
    columnInfo.type = meta.getColumnType(column);
    columnInfo.typeName = meta.getColumnTypeName(column);
    columnInfo.className = meta.getColumnClassName(column);
    columnInfo.readOnly = meta.isReadOnly(column);
    columnInfo.writable = meta.isWritable(column);
    columnInfo.definitelyWritable = meta.isDefinitelyWritable(column);
    if (helper != null) {
        helper.fixColumnInfo(columnInfo);
    }
    return columnInfo;
}
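A usage sketch for the method above: snapshot every column of a query result. Passing null for the helper skips the driver-specific fixups, which the null check above allows. The SQL text is a placeholder, and the jaqy import for ResultSetMetaDataUtils is omitted since its package is not shown here.

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import com.teradata.jaqy.schema.FullColumnInfo;
// ResultSetMetaDataUtils would be imported from its jaqy package.

public class ColumnSnapshotSketch {
    public static FullColumnInfo[] snapshot(Statement stmt, String sql) throws SQLException {
        try (ResultSet rs = stmt.executeQuery(sql)) {
            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();
            FullColumnInfo[] infos = new FullColumnInfo[columnCount];
            // JDBC column indexes are 1-based.
            for (int column = 1; column <= columnCount; ++column) {
                infos[column - 1] = ResultSetMetaDataUtils.getColumnInfo(meta, column, null);
            }
            return infos;
        }
    }
}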
Class ProjectColumnList, method bind:
public void bind(JaqyResultSet rs, JaqyInterpreter interpreter) throws SQLException {
    int numCols = m_columnList.size();
    ExpNode[] exps = new ExpNode[numCols];
    FullColumnInfo[] columnInfos = new FullColumnInfo[numCols];
    ResultSetMetaData rsmd = rs.getMetaData().getMetaData();
    JaqyHelper helper = rs.getHelper();
    for (int i = 0; i < numCols; ++i) {
        ProjectColumn column = m_columnList.get(i);
        int index = rs.findColumn(column.name);
        columnInfos[i] = ResultSetMetaDataUtils.getColumnInfo(rsmd, index, helper);
        columnInfos[i].name = column.asName;
        columnInfos[i].label = column.asName;
        ColumnNode exp = new ColumnNode(column.name);
        exp.bind(rs, null, interpreter);
        exps[i] = exp;
    }
    m_project = new ExpNodeProject(exps);
    m_meta = new JaqyResultSetMetaData(new InMemoryResultSetMetaData(columnInfos), DummyHelper.getInstance());
}
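The renaming step above is what implements column aliasing: the copied FullColumnInfo keeps the source column's type details while name and label are overwritten with the projection's alias. The same pattern isolated as a sketch (the method wrapper is illustrative, not part of jaqy):

import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import com.teradata.jaqy.schema.FullColumnInfo;
// ResultSetMetaDataUtils and JaqyHelper would be imported from their jaqy packages.

public class AliasSketch {
    // Sketch only: copy the source column's metadata, then rename it for the projection.
    public static FullColumnInfo alias(ResultSetMetaData rsmd, int sourceIndex,
                                       JaqyHelper helper, String asName) throws SQLException {
        FullColumnInfo info = ResultSetMetaDataUtils.getColumnInfo(rsmd, sourceIndex, helper);
        info.name = asName;
        info.label = asName;
        return info;
    }
}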
Class ResultSetMetaDataUtils, method copyResultSetMetaData:
public static InMemoryResultSetMetaData copyResultSetMetaData(PropertyTable pt) {
    String[] titles = pt.getTitles();
    int columnCount = titles.length;
    FullColumnInfo[] columnInfos = new FullColumnInfo[columnCount];
    int[] lengths = pt.getLengths();
    for (int i = 0; i < columnCount; ++i) {
        FullColumnInfo columnInfo = new FullColumnInfo();
        columnInfos[i] = columnInfo;
        columnInfo.autoIncrement = false;
        columnInfo.caseSensitive = false;
        columnInfo.searchable = false;
        columnInfo.currency = false;
        columnInfo.nullable = ResultSetMetaData.columnNullable;
        columnInfo.signed = false;
        columnInfo.displaySize = lengths[i];
        columnInfo.label = titles[i];
        columnInfo.name = titles[i];
        columnInfo.schemaName = null;
        columnInfo.precision = 0;
        columnInfo.scale = 0;
        columnInfo.tableName = null;
        columnInfo.catalogName = null;
        columnInfo.type = Types.VARCHAR;
        columnInfo.typeName = "VARCHAR";
        columnInfo.readOnly = true;
        columnInfo.writable = false;
        columnInfo.definitelyWritable = false;
        columnInfo.className = "java.lang.String";
    }
    return new InMemoryResultSetMetaData(columnInfos);
}
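A usage sketch for the copy above, assuming a PropertyTable obtained elsewhere in jaqy and assuming InMemoryResultSetMetaData implements java.sql.ResultSetMetaData so that the standard getters reflect the fields set in the loop:

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
// PropertyTable, ResultSetMetaDataUtils, and InMemoryResultSetMetaData would be
// imported from their jaqy packages.

public class PropertyTableMetaDataSketch {
    public static void describe(PropertyTable pt) throws SQLException {
        ResultSetMetaData meta = ResultSetMetaDataUtils.copyResultSetMetaData(pt);
        // Every column is reported as a read-only, nullable VARCHAR whose display
        // size comes from pt.getLengths().
        for (int column = 1; column <= meta.getColumnCount(); ++column) {
            System.out.println(meta.getColumnName(column) + " "
                + meta.getColumnTypeName(column)
                + "(" + meta.getColumnDisplaySize(column) + ")");
        }
    }
}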
Class CSVUtils, method getSchemaInfo:
public static SchemaInfo getSchemaInfo(String[] headers, Iterator<CSVRecord> iterator, String[] naValues, boolean precise, long limit) {
    int count = -1;
    ScanColumnType[] columns = null;
    int rowCount = 0;
    boolean autoStop = false;
    if (limit < 0) {
        limit = Long.MAX_VALUE;
        autoStop = true;
    } else if (limit == 0)
        limit = Long.MAX_VALUE;
    boolean needScan;
    while (iterator.hasNext() && rowCount < limit) {
        CSVRecord record = iterator.next();
        ++rowCount;
        int size = record.size();
        needScan = false;
        if (count == -1) {
            count = size;
            columns = new ScanColumnType[count];
            for (int i = 0; i < count; ++i) {
                columns[i] = new ScanColumnType();
                columns[i].type = Types.NULL;
                columns[i].nullable = false;
                columns[i].minLength = Integer.MAX_VALUE;
                columns[i].maxLength = -1;
            }
            needScan = true;
        }
        for (int i = 0; i < count; ++i) {
            String s = record.get(i);
            boolean isNa = false;
            if (naValues != null) {
                for (String na : naValues) {
                    if (s.equals(na)) {
                        isNa = true;
                        break;
                    }
                }
            }
            if (isNa) {
                columns[i].nullable = true;
            } else {
                int len = s.length();
                if (columns[i].maxLength < len)
                    columns[i].maxLength = len;
                if (columns[i].minLength > len)
                    columns[i].minLength = len;
                if (columns[i].type == Types.NUMERIC || columns[i].type == Types.NULL) {
                    try {
                        BigDecimal dec = new BigDecimal(s);
                        int precision = dec.precision();
                        int scale = dec.scale();
                        // if precision is smaller than or equal to scale, then we have leading "0."
                        if (precision <= scale)
                            precision = scale + 1;
                        if (columns[i].type == Types.NULL) {
                            columns[i].type = Types.NUMERIC;
                            columns[i].precision = precision;
                            columns[i].scale = scale;
                        } else {
                            if (columns[i].scale != scale) {
                                columns[i].scale = Integer.MAX_VALUE;
                            }
                            if (columns[i].precision < precision) {
                                columns[i].precision = precision;
                            }
                        }
                        ++columns[i].notNullCount;
                    } catch (Exception ex) {
                        if (columns[i].minLength == columns[i].maxLength) {
                            // Check if we are in a fixed char column.
                            columns[i].type = Types.CHAR;
                            ++columns[i].notNullCount;
                        } else {
                            columns[i].type = Types.VARCHAR;
                            // For varchar columns, we basically have to scan
                            // all the rows to find the maximum string length.
                            autoStop = false;
                        }
                    }
                } else if (columns[i].type == Types.CHAR) {
                    if (columns[i].minLength == columns[i].maxLength)
                        ++columns[i].notNullCount;
                    else {
                        columns[i].type = Types.VARCHAR;
                        // For varchar columns, we basically have to scan
                        // all the rows to find the maximum string length.
                        autoStop = false;
                    }
                }
            }
            if (autoStop && columns[i].notNullCount < AUTO_STOP_MINIMUM) {
                // For each number column, we basically need enough
                // confidence to say that additional scan is not
                // necessary.
                needScan = true;
            }
        }
        if (autoStop && !needScan) {
            // Automatically stop if we just have numbers.
            break;
        }
    }
    if (rowCount == 0)
        return null;
    FullColumnInfo[] columnInfos = new FullColumnInfo[count];
    for (int i = 0; i < count; ++i) {
        columnInfos[i] = new FullColumnInfo();
        if (headers != null) {
            columnInfos[i].name = headers[i];
        }
        if (columnInfos[i].name == null || columnInfos[i].name.trim().length() == 0) {
            columnInfos[i].name = "col" + (i + 1);
        }
        columnInfos[i].label = columnInfos[i].name;
        columnInfos[i].nullable = columns[i].nullable ? ResultSetMetaData.columnNullable : ResultSetMetaData.columnNoNulls;
        if (columns[i].type == Types.CHAR || columns[i].type == Types.VARCHAR) {
            columnInfos[i].type = columns[i].type;
            columnInfos[i].precision = columns[i].maxLength;
        } else {
            columnInfos[i].precision = columns[i].precision;
            if (columns[i].scale == Integer.MAX_VALUE) {
                columnInfos[i].type = Types.DOUBLE;
                columnInfos[i].scale = 0;
            } else if (columns[i].scale <= 0 && columns[i].precision < 11) {
                columnInfos[i].type = Types.INTEGER;
                columnInfos[i].scale = 0;
            } else if (precise && columns[i].scale > 0) {
                columnInfos[i].type = Types.DECIMAL;
                columnInfos[i].scale = columns[i].scale;
            } else {
                columnInfos[i].type = Types.DOUBLE;
                columnInfos[i].scale = 0;
            }
        }
    }
    return new SchemaInfo(columnInfos);
}
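A usage sketch for getSchemaInfo, assuming CSVRecord is org.apache.commons.csv.CSVRecord (whose get and size methods the scan loop above relies on). The file path, NA marker, and row limit are placeholders; precise = true asks for DECIMAL rather than DOUBLE when a column kept a consistent positive scale.

import java.io.FileReader;
import java.io.Reader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
// SchemaInfo and CSVUtils would be imported from their jaqy packages.

public class CsvSchemaScanSketch {
    public static SchemaInfo scan(String path) throws Exception {
        try (Reader reader = new FileReader(path);
             CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(reader)) {
            String[] headers = parser.getHeaderMap().keySet().toArray(new String[0]);
            // Treat empty strings as NA, prefer exact DECIMAL types, and scan at most
            // 10000 rows before settling on a schema.
            return CSVUtils.getSchemaInfo(headers, parser.iterator(), new String[] { "" }, true, 10000);
        }
    }
}

Because withFirstRecordAsHeader consumes the header row, the iterator handed to getSchemaInfo starts at the first data record, which is what the scan expects.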