Usage of org.voltdb.VoltType in project voltdb by VoltDB.
From class PerPartitionTable, method buildTable:
/**
 * Drains up to m_minBatchTriggerSize queued rows, converts each row's
 * values to be compatible with the table's declared column types, and
 * adds the convertible rows to m_table. A row whose conversion fails is
 * reported back to its originating loader, removed from the batch, and
 * its outstanding-row count decremented.
 *
 * @return a callback wrapping the rows that made it into the batch
 */
private PartitionProcedureCallback buildTable() {
    ArrayList<VoltBulkLoaderRow> batch = new ArrayList<VoltBulkLoaderRow>(m_minBatchTriggerSize);
    m_partitionRowQueue.drainTo(batch, m_minBatchTriggerSize);
    ListIterator<VoltBulkLoaderRow> rowIter = batch.listIterator();
    while (rowIter.hasNext()) {
        VoltBulkLoaderRow row = rowIter.next();
        VoltBulkLoader owner = row.m_loader;
        Object[] converted = new Object[row.m_rowData.length];
        try {
            for (int col = 0; col < converted.length; col++) {
                final VoltType columnType = m_columnTypes[col];
                converted[col] = ParameterConverter.tryToMakeCompatible(columnType.classFromType(), row.m_rowData[col]);
            }
        } catch (VoltTypeException e) {
            // Conversion failed: report the bad row to its loader and drop it from the batch.
            owner.generateError(row.m_rowHandle, row.m_rowData, e.getMessage());
            owner.m_outstandingRowCount.decrementAndGet();
            rowIter.remove();
            continue;
        }
        m_table.addRow(converted);
    }
    return new PartitionProcedureCallback(batch);
}
Usage of org.voltdb.VoltType in project voltdb by VoltDB.
From class DDLCompiler, method addTableToCatalog:
/**
 * Translates one HSQL-produced "table" VoltXMLElement into a catalog Table:
 * adds its columns, indexes, and constraints, records materialized-view and
 * stream attributes, and validates column/row size limits.
 *
 * @param db     catalog Database to add the new table to
 * @param node   VoltXMLElement whose name must be "table"
 * @param isXDCR whether the cluster is configured for XDCR (enables the
 *               DR unique-index warning below)
 * @throws VoltCompilerException on too many columns, an oversized
 *         variable-length column, or an oversized maximum row
 */
private void addTableToCatalog(Database db, VoltXMLElement node, boolean isXDCR) throws VoltCompilerException {
assert node.name.equals("table");
// Construct table-specific maps
HashMap<String, Column> columnMap = new HashMap<>();
HashMap<String, Index> indexMap = new HashMap<>();
final String name = node.attributes.get("name");
// create a table node in the catalog
final Table table = db.getTables().add(name);
// set max value before return for view table
table.setTuplelimit(Integer.MAX_VALUE);
// add the original DDL to the table (or null if it's not there)
TableAnnotation annotation = new TableAnnotation();
table.setAnnotation(annotation);
// handle the case where this is a materialized view
final String query = node.attributes.get("query");
if (query != null) {
assert (query.length() > 0);
// Defer view processing: remember the defining query for this table.
m_matViewMap.put(table, query);
}
// Stream (export) attributes, if this table was declared as a stream.
final boolean isStream = (node.attributes.get("stream") != null);
final String streamTarget = node.attributes.get("export");
final String streamPartitionColumn = node.attributes.get("partitioncolumn");
// all tables start replicated
// if a partition is found in the project file later,
// then this is reversed
table.setIsreplicated(true);
// map of index replacements for later constraint fixup
final Map<String, String> indexReplacementMap = new TreeMap<>();
// Need the columnTypes sorted by column index.
SortedMap<Integer, VoltType> columnTypes = new TreeMap<>();
for (VoltXMLElement subNode : node.children) {
if (subNode.name.equals("columns")) {
int colIndex = 0;
for (VoltXMLElement columnNode : subNode.children) {
if (columnNode.name.equals("column")) {
addColumnToCatalog(table, columnNode, columnTypes, columnMap, m_compiler);
colIndex++;
}
}
// limit the total number of columns in a table
if (colIndex > MAX_COLUMNS) {
String msg = "Table " + name + " has " + colIndex + " columns (max is " + MAX_COLUMNS + ")";
throw m_compiler.new VoltCompilerException(msg);
}
}
if (subNode.name.equals("indexes")) {
// Process user-named indexes before the HSQL auto-generated ones
// that refer to them.
for (VoltXMLElement indexNode : subNode.children) {
if (indexNode.name.equals("index") == false)
continue;
String indexName = indexNode.attributes.get("name");
if (indexName.startsWith(HSQLInterface.AUTO_GEN_IDX_PREFIX) == false) {
addIndexToCatalog(db, table, indexNode, indexReplacementMap, indexMap, columnMap, m_compiler);
}
}
// Second pass: the auto-generated indexes.
for (VoltXMLElement indexNode : subNode.children) {
if (indexNode.name.equals("index") == false)
continue;
String indexName = indexNode.attributes.get("name");
if (indexName.startsWith(HSQLInterface.AUTO_GEN_IDX_PREFIX) == true) {
addIndexToCatalog(db, table, indexNode, indexReplacementMap, indexMap, columnMap, m_compiler);
}
}
}
if (subNode.name.equals("constraints")) {
for (VoltXMLElement constraintNode : subNode.children) {
if (constraintNode.name.equals("constraint")) {
addConstraintToCatalog(table, constraintNode, indexReplacementMap, indexMap);
}
}
}
}
// Warn user if DR table don't have any unique index.
if (isXDCR && node.attributes.get("drTable") != null && node.attributes.get("drTable").equalsIgnoreCase("ENABLE")) {
boolean hasUniqueIndex = false;
for (Index index : table.getIndexes()) {
if (index.getUnique()) {
hasUniqueIndex = true;
break;
}
}
if (!hasUniqueIndex) {
String info = String.format("Table %s doesn't have any unique index, it will cause full table scans to update/delete DR record and may become slower as table grow.", table.getTypeName());
m_compiler.addWarn(info);
}
}
// The signature is derived from the table name plus the ordered column types.
table.setSignature(CatalogUtil.getSignatureForTable(name, columnTypes));
/*
 * Validate that each variable-length column is below the max value length,
 * and that the maximum size for the row is below the max row length.
 */
int maxRowSize = 0;
for (Column c : columnMap.values()) {
VoltType t = VoltType.get((byte) c.getType());
if (t == VoltType.STRING && (!c.getInbytes())) {
// VARCHAR declared in characters: budget the worst-case UTF-8 expansion.
if (c.getSize() * MAX_BYTES_PER_UTF8_CHARACTER > VoltType.MAX_VALUE_LENGTH) {
throw m_compiler.new VoltCompilerException("Column " + name + "." + c.getName() + " specifies a maximum size of " + c.getSize() + " characters" + " but the maximum supported size is " + VoltType.humanReadableSize(VoltType.MAX_VALUE_LENGTH / MAX_BYTES_PER_UTF8_CHARACTER) + " characters or " + VoltType.humanReadableSize(VoltType.MAX_VALUE_LENGTH) + " bytes");
}
// 4 bytes of length prefix per variable-length value, plus the payload.
maxRowSize += 4 + c.getSize() * MAX_BYTES_PER_UTF8_CHARACTER;
} else if (t.isVariableLength()) {
// VARBINARY, or VARCHAR declared in bytes.
if (c.getSize() > VoltType.MAX_VALUE_LENGTH) {
throw m_compiler.new VoltCompilerException("Column " + name + "." + c.getName() + " specifies a maximum size of " + c.getSize() + " bytes" + " but the maximum supported size is " + VoltType.humanReadableSize(VoltType.MAX_VALUE_LENGTH));
}
maxRowSize += 4 + c.getSize();
} else {
maxRowSize += t.getLengthInBytesForFixedTypes();
}
}
if (maxRowSize > MAX_ROW_SIZE) {
throw m_compiler.new VoltCompilerException("Error: Table " + name + " has a maximum row size of " + maxRowSize + " but the maximum supported row size is " + MAX_ROW_SIZE);
}
// the DDL statement for the VIEW
if (query != null) {
annotation.ddl = query;
} else {
// Get the final DDL for the table rebuilt from the catalog object
// Don't need a real StringBuilder or export state to get the CREATE for a table
annotation.ddl = CatalogSchemaTools.toSchema(new StringBuilder(), table, query, isStream, streamPartitionColumn, streamTarget);
}
}
Usage of org.voltdb.VoltType in project voltdb by VoltDB.
From class AdHocPlannedStmtBatch, method mockStatementBatch:
/**
 * Fabricates an AdHocPlannedStmtBatch for testing, as if the supplied SQL
 * had come back from the planner. Builds a dummy CorePlan and a single
 * AdHocPlannedStatement, then infers the partition parameter type from
 * either the explicit partition key or the indexed user parameter.
 */
public static AdHocPlannedStmtBatch mockStatementBatch(long replySiteId, String sql, Object[] extractedValues, VoltType[] paramTypes, Object[] userParams, int partitionParamIndex, Object[] userPartitionKey, byte[] catalogHash, boolean readOnly) {
    // Mock up dummy results from the work request. A -1 partition index
    // means the statement is treated as multi-partition.
    final boolean noPartitionIndex = (partitionParamIndex == -1);
    byte[] collectorFragment = noPartitionIndex ? new byte[20] : null;
    byte[] aggregatorHash = new byte[20];
    byte[] collectorHash = noPartitionIndex ? new byte[20] : null;
    CorePlan core = new CorePlan(new byte[0], collectorFragment, aggregatorHash, collectorHash, false, readOnly, paramTypes, catalogHash);
    ParameterSet extracted;
    if (extractedValues == null) {
        extracted = ParameterSet.emptyParameterSet();
    } else {
        extracted = ParameterSet.fromArrayNoCopy(extractedValues);
    }
    AdHocPlannedStatement s = new AdHocPlannedStatement(sql.getBytes(Constants.UTF8ENCODING), core, extracted, null);
    List<AdHocPlannedStatement> stmts = new ArrayList<AdHocPlannedStatement>();
    stmts.add(s);
    // Prefer the explicit partition key; otherwise fall back to the
    // user parameter designated as the partitioning value.
    Object partitionParamValue = null;
    if (userPartitionKey != null) {
        partitionParamValue = userPartitionKey[0];
    } else if (partitionParamIndex > -1) {
        partitionParamValue = userParams[partitionParamIndex];
    }
    VoltType partitionParamType = (partitionParamValue == null) ? null : VoltType.typeFromObject(partitionParamValue);
    // Finally, mock up the planned batch.
    return new AdHocPlannedStmtBatch(userParams, stmts, partitionParamIndex, partitionParamType, partitionParamValue, null);
}
Usage of org.voltdb.VoltType in project voltdb by VoltDB.
From class AdHocPlannedStmtBatch, method flattenPlanArrayToBuffer:
/**
 * Serializes the whole planned batch into a single ByteBuffer: first the
 * user parameters (type-coerced to match the statements' declared parameter
 * types), then a 2-byte statement count, then each planned statement.
 * Deserialization is done piecemeal via the static methods
 * userParamsFromBuffer and planArrayFromBuffer, so no receiver instance is
 * needed on the read side.
 *
 * @return a buffer containing the serialized batch
 * @throws IOException on serialization failure
 * @throws VoltTypeException when the user supplied too few, too many, or
 *         incompatible parameter values
 */
public ByteBuffer flattenPlanArrayToBuffer() throws IOException {
    ParameterSet typedParams;
    if (userParamSet == null) {
        typedParams = ParameterSet.emptyParameterSet();
    } else {
        // Coerce each user-supplied value to the class expected by the
        // corresponding statement parameter, consuming values in order
        // across all statements.
        Object[] compatible = new Object[userParamSet.length];
        int consumed = 0;
        for (AdHocPlannedStatement stmt : plannedStatements) {
            for (VoltType expected : stmt.core.parameterTypes) {
                if (consumed >= compatible.length) {
                    String errorMsg = "Too few actual arguments were passed for the parameters in the sql statement(s): (" + compatible.length + " vs. " + consumed + ")";
                    // Volt-TYPE-Exception is slightly cheating, here, should there be a more general VoltArgumentException?
                    throw new VoltTypeException(errorMsg);
                }
                compatible[consumed] = ParameterConverter.tryToMakeCompatible(expected.classFromType(), userParamSet[consumed]);
                consumed++;
            }
        }
        // Every supplied value must have been consumed exactly once.
        if (consumed < compatible.length) {
            // Volt-TYPE-Exception is slightly cheating, here, should there be a more general VoltArgumentException?
            String errorMsg = "Too many actual arguments were passed for the parameters in the sql statement(s): (" + compatible.length + " vs. " + consumed + ")";
            throw new VoltTypeException(errorMsg);
        }
        typedParams = ParameterSet.fromArrayNoCopy(compatible);
    }
    // Size the buffer: parameters, 2-byte statement count, then statements.
    int size = typedParams.getSerializedSize() + 2;
    for (AdHocPlannedStatement stmt : plannedStatements) {
        size += stmt.getSerializedSize();
    }
    ByteBuffer out = ByteBuffer.allocate(size);
    typedParams.flattenToBuffer(out);
    out.putShort((short) plannedStatements.size());
    for (AdHocPlannedStatement stmt : plannedStatements) {
        stmt.flattenToBuffer(out);
    }
    return out;
}
Usage of org.voltdb.VoltType in project voltdb by VoltDB.
From class JDBC4ResultSet, method getBigDecimal:
/**
 * Retrieves the value of the designated column in the current row of this
 * ResultSet object as a java.math.BigDecimal with full precision.
 *
 * @param columnIndex the first column is 1, the second is 2, ... (JDBC
 *        1-based; VoltTable access below is 0-based)
 * @return the column value as a BigDecimal, or null if the value was SQL NULL
 * @throws SQLException if the column index is out of bounds or the column
 *         type cannot be represented as a BigDecimal
 */
@Override
public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
    checkColumnBounds(columnIndex);
    try {
        final VoltType type = table.getColumnType(columnIndex - 1);
        final BigDecimal decimalValue;
        switch (type) {
            // All integral widths are fetched via getLong; one fall-through
            // body replaces the four identical cases.
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                decimalValue = new BigDecimal(table.getLong(columnIndex - 1));
                break;
            case FLOAT:
                decimalValue = new BigDecimal(table.getDouble(columnIndex - 1));
                break;
            case DECIMAL:
                decimalValue = table.getDecimalAsBigDecimal(columnIndex - 1);
                break;
            default:
                throw new IllegalArgumentException("Cannot get BigDecimal value for column type '" + type + "'");
        }
        // wasNull() reflects the most recent getter call above.
        return table.wasNull() ? null : decimalValue;
    } catch (Exception x) {
        throw SQLError.get(x);
    }
}
End of aggregated usages.