Use of org.apache.cassandra.thrift.InvalidRequestException in project eiger by wlloyd.
From the class AlterTableStatement, method applyPropertiesToCfDef:
public static void applyPropertiesToCfDef(CfDef cfDef, CFPropDefs cfProps) throws InvalidRequestException {
    if (cfProps.hasProperty(CFPropDefs.KW_COMPARATOR)) {
        throw new InvalidRequestException("Can't change CF comparator after creation");
    }
    if (cfProps.hasProperty(CFPropDefs.KW_COMMENT)) {
        cfDef.comment = new Utf8(cfProps.getProperty(CFPropDefs.KW_COMMENT));
    }
    if (cfProps.hasProperty(CFPropDefs.KW_DEFAULTVALIDATION)) {
        try {
            cfDef.default_validation_class = new Utf8(cfProps.getValidator().toString());
        } catch (ConfigurationException e) {
            throw new InvalidRequestException(String.format("Invalid validation type %s",
                    cfProps.getProperty(CFPropDefs.KW_DEFAULTVALIDATION)));
        }
    }
    // Scalar properties fall back to the values already present on the CfDef.
    cfDef.read_repair_chance = cfProps.getPropertyDouble(CFPropDefs.KW_READREPAIRCHANCE, cfDef.read_repair_chance);
    cfDef.gc_grace_seconds = cfProps.getPropertyInt(CFPropDefs.KW_GCGRACESECONDS, cfDef.gc_grace_seconds);
    cfDef.replicate_on_write = cfProps.getPropertyBoolean(CFPropDefs.KW_REPLICATEONWRITE, cfDef.replicate_on_write);
    cfDef.min_compaction_threshold = cfProps.getPropertyInt(CFPropDefs.KW_MINCOMPACTIONTHRESHOLD, cfDef.min_compaction_threshold);
    cfDef.max_compaction_threshold = cfProps.getPropertyInt(CFPropDefs.KW_MAXCOMPACTIONTHRESHOLD, cfDef.max_compaction_threshold);
    // Compaction strategy options and compression parameters are replaced wholesale when supplied.
    if (!cfProps.compactionStrategyOptions.isEmpty()) {
        cfDef.compaction_strategy_options = new HashMap<CharSequence, CharSequence>();
        for (Map.Entry<String, String> entry : cfProps.compactionStrategyOptions.entrySet()) {
            cfDef.compaction_strategy_options.put(new Utf8(entry.getKey()), new Utf8(entry.getValue()));
        }
    }
    if (!cfProps.compressionParameters.isEmpty()) {
        cfDef.compression_options = new HashMap<CharSequence, CharSequence>();
        for (Map.Entry<String, String> entry : cfProps.compressionParameters.entrySet()) {
            cfDef.compression_options.put(new Utf8(entry.getKey()), new Utf8(entry.getValue()));
        }
    }
}
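A minimal, hypothetical call site is sketched below; the helper name applyAlter is an assumption, and only applyPropertiesToCfDef and the InvalidRequestException message come from the eiger code above.

// Hypothetical caller; cfDef is the existing definition and cfProps the parsed ALTER options.
static CfDef applyAlter(CfDef cfDef, CFPropDefs cfProps) throws InvalidRequestException {
    try {
        AlterTableStatement.applyPropertiesToCfDef(cfDef, cfProps);
        return cfDef;
    } catch (InvalidRequestException e) {
        // getWhy() carries the human-readable reason set above,
        // e.g. "Can't change CF comparator after creation".
        System.err.println("ALTER rejected: " + e.getWhy());
        throw e;
    }
}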
Use of org.apache.cassandra.thrift.InvalidRequestException in project logprocessing by cloudian.
From the class CassandraClient, method open:
public void open() throws IOException {
    // Pick a server from the server set; translate "none available" into an IOException.
    try {
        this.currentServer = this.serverSet.get();
    } catch (ServerSet.NoServersAvailableException e) {
        throw new IOException("No Cassandra servers available.");
    }
    int splitIndex = this.currentServer.indexOf(':');
    if (splitIndex == -1) {
        throw new IOException("Bad host:port pair: " + this.currentServer);
    }
    String host = this.currentServer.substring(0, splitIndex);
    int port = Integer.parseInt(this.currentServer.substring(splitIndex + 1));
    // Cassandra's Thrift interface expects a framed transport.
    TSocket sock = new TSocket(host, port);
    this.transport = new TFramedTransport(sock);
    TProtocol protocol = new TBinaryProtocol(transport);
    this.client = new Cassandra.Client(protocol);
    try {
        this.transport.open();
        this.client.set_keyspace(this.keyspace);
    } catch (TException texc) {
        throw new IOException(texc.getMessage());
    } catch (InvalidRequestException exc) {
        // e.g. the configured keyspace does not exist
        throw new IOException(exc.getMessage());
    }
}
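The same connect-and-set-keyspace pattern can be written without the ServerSet wrapper. Below is a minimal standalone sketch; the class name ThriftConnect and the host/port/keyspace parameters are placeholders, and it reports the Thrift exception's why field instead of getMessage().

import java.io.IOException;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public final class ThriftConnect {
    public static Cassandra.Client connect(String host, int port, String keyspace) throws IOException {
        // Cassandra's Thrift interface requires a framed transport.
        TTransport transport = new TFramedTransport(new TSocket(host, port));
        Cassandra.Client client = new Cassandra.Client(new TBinaryProtocol(transport));
        try {
            transport.open();
            // Fails with InvalidRequestException when the keyspace does not exist.
            client.set_keyspace(keyspace);
        } catch (InvalidRequestException e) {
            throw new IOException("set_keyspace failed: " + e.getWhy());
        } catch (TException e) {
            throw new IOException(e.getMessage());
        }
        return client;
    }
}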
Use of org.apache.cassandra.thrift.InvalidRequestException in project brisk by riptano.
From the class SchemaManagerService, method buildTable:
private Table buildTable(CfDef cfDef) {
    Table table = new Table();
    table.setDbName(cfDef.keyspace);
    table.setTableName(cfDef.name);
    table.setTableType(TableType.EXTERNAL_TABLE.toString());
    table.putToParameters("EXTERNAL", "TRUE");
    table.putToParameters("cassandra.ks.name", cfDef.keyspace);
    table.putToParameters("cassandra.cf.name", cfDef.name);
    table.putToParameters("cassandra.slice.predicate.size", "100");
    table.putToParameters("storage_handler", "org.apache.hadoop.hive.cassandra.CassandraStorageHandler");
    table.setPartitionKeys(new ArrayList<FieldSchema>());
    // cassandra.column.mapping
    StorageDescriptor sd = new StorageDescriptor();
    sd.setInputFormat("org.apache.hadoop.hive.cassandra.input.HiveCassandraStandardColumnInputFormat");
    sd.setOutputFormat("org.apache.hadoop.hive.cassandra.output.HiveCassandraOutputFormat");
    sd.setParameters(new HashMap<String, String>());
    try {
        sd.setLocation(warehouse.getDefaultTablePath(cfDef.keyspace, cfDef.name).toString());
    } catch (MetaException me) {
        log.error("could not build path information correctly", me);
    }
    SerDeInfo serde = new SerDeInfo();
    serde.setSerializationLib("org.apache.hadoop.hive.cassandra.serde.CassandraColumnSerDe");
    serde.putToParameters("serialization.format", "1");
    StringBuilder mapping = new StringBuilder();
    StringBuilder validator = new StringBuilder();
    try {
        CFMetaData cfm = CFMetaData.fromThrift(cfDef);
        AbstractType keyValidator = cfDef.key_validation_class != null ? TypeParser.parse(cfDef.key_validation_class) : BytesType.instance;
        addTypeToStorageDescriptor(sd, ByteBufferUtil.bytes("row_key"), keyValidator, keyValidator);
        mapping.append(":key");
        validator.append(keyValidator.toString());
        for (ColumnDef column : cfDef.getColumn_metadata()) {
            addTypeToStorageDescriptor(sd, column.name, TypeParser.parse(cfDef.comparator_type), TypeParser.parse(column.getValidation_class()));
            try {
                mapping.append(",");
                mapping.append(ByteBufferUtil.string(column.name));
                validator.append(",");
                validator.append(column.getValidation_class());
            } catch (CharacterCodingException e) {
                log.error("could not build column mapping correctly", e);
            }
        }
        serde.putToParameters("cassandra.columns.mapping", mapping.toString());
        serde.putToParameters("cassandra.cf.validatorType", validator.toString());
        sd.setSerdeInfo(serde);
    } catch (ConfigurationException ce) {
        throw new CassandraHiveMetaStoreException("Problem converting comparator type: " + cfDef.comparator_type, ce);
    } catch (InvalidRequestException ire) {
        throw new CassandraHiveMetaStoreException("Problem parsing CfDef: " + cfDef.name, ire);
    }
    table.setSd(sd);
    if (log.isDebugEnabled())
        log.debug("constructed table for CF:{} {}", cfDef.name, table.toString());
    return table;
}
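To show where buildTable would typically be driven from, here is a hypothetical sketch that maps every column family of a keyspace to a Hive table. describe_keyspace is the standard Thrift call, but the method name buildTablesForKeyspace and the surrounding wiring are assumptions, not code from brisk.

// Hypothetical driver inside the same service class as buildTable.
List<Table> buildTablesForKeyspace(Cassandra.Client client, String keyspace) throws Exception {
    // describe_keyspace throws NotFoundException or InvalidRequestException for bad keyspaces.
    KsDef ksDef = client.describe_keyspace(keyspace);
    List<Table> tables = new ArrayList<Table>();
    for (CfDef cfDef : ksDef.getCf_defs()) {
        tables.add(buildTable(cfDef));
    }
    return tables;
}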
Use of org.apache.cassandra.thrift.InvalidRequestException in project eiger by wlloyd.
From the class ClientState, method hasColumnFamilySchemaAccess:
/**
* Confirms that the client thread has the given Permission for the ColumnFamily list of
* the current keyspace.
*/
public void hasColumnFamilySchemaAccess(Permission perm) throws InvalidRequestException {
    validateLogin();
    validateKeyspace();
    // hardcode disallowing messing with system keyspace
    if (keyspace.equalsIgnoreCase(Table.SYSTEM_TABLE) && perm == Permission.WRITE)
        throw new InvalidRequestException("system keyspace is not user-modifiable");
    resourceClear();
    resource.add(keyspace);
    Set<Permission> perms = DatabaseDescriptor.getAuthority().authorize(user, resource);
    hasAccess(user, perms, perm, resource);
}
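A typical caller runs this check before announcing a schema change. A minimal, hypothetical call site follows; the method name dropColumnFamily and the omitted migration step are assumptions, not eiger code.

// Hypothetical call site: state is the per-connection ClientState.
void dropColumnFamily(ClientState state, String columnFamily) throws InvalidRequestException {
    // Throws InvalidRequestException if the user is not logged in, no keyspace is set,
    // the target is the system keyspace, or the configured authority denies WRITE.
    state.hasColumnFamilySchemaAccess(Permission.WRITE);
    // ... announce the actual schema change here ...
}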
Use of org.apache.cassandra.thrift.InvalidRequestException in project eiger by wlloyd.
From the class CFPropDefs, method validate:
public void validate() throws InvalidRequestException {
    // we need to remove parent:key = value pairs from the main properties
    Set<String> propsToRemove = new HashSet<String>();
    // check if we have compaction/compression options
    for (String property : properties.keySet()) {
        if (!property.contains(":"))
            continue;
        String key = property.split(":")[1];
        String val = properties.get(property);
        if (property.startsWith(COMPACTION_OPTIONS_PREFIX)) {
            compactionStrategyOptions.put(key, val);
            propsToRemove.add(property);
        }
        if (property.startsWith(COMPRESSION_PARAMETERS_PREFIX)) {
            compressionParameters.put(key, val);
            propsToRemove.add(property);
        }
    }
    for (String property : propsToRemove)
        properties.remove(property);
    // Catch the case where someone passed a kwarg that is not recognized.
    for (String bogus : Sets.difference(properties.keySet(), allowedKeywords))
        throw new InvalidRequestException(bogus + " is not a valid keyword argument for CREATE COLUMNFAMILY");
    for (String obsolete : Sets.intersection(properties.keySet(), obsoleteKeywords))
        logger.warn("Ignoring obsolete property {}", obsolete);
    // Validate min/max compaction thresholds
    Integer minCompaction = getPropertyInt(KW_MINCOMPACTIONTHRESHOLD, null);
    Integer maxCompaction = getPropertyInt(KW_MAXCOMPACTIONTHRESHOLD, null);
    if ((minCompaction != null) && (maxCompaction != null)) {
        // Both min and max are set
        if ((minCompaction > maxCompaction) && (maxCompaction != 0))
            throw new InvalidRequestException(String.format("%s cannot be larger than %s",
                    KW_MINCOMPACTIONTHRESHOLD, KW_MAXCOMPACTIONTHRESHOLD));
    } else if (minCompaction != null) {
        // Only the min threshold is set
        if (minCompaction > CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD)
            throw new InvalidRequestException(String.format("%s cannot be larger than %s, (default %s)",
                    KW_MINCOMPACTIONTHRESHOLD, KW_MAXCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD));
    } else if (maxCompaction != null) {
        // Only the max threshold is set
        if ((maxCompaction < CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD) && (maxCompaction != 0))
            throw new InvalidRequestException(String.format("%s cannot be smaller than %s, (default %s)",
                    KW_MAXCOMPACTIONTHRESHOLD, KW_MINCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD));
    }
}
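validate() is meant to run right after the WITH options of a CREATE COLUMNFAMILY statement are parsed. A minimal, hypothetical call site (the method name checkProperties is an assumption):

// Hypothetical call site; cfProps holds the parsed WITH options.
void checkProperties(CFPropDefs cfProps) throws InvalidRequestException {
    // Splits compaction:/compression: sub-options, rejects unknown keywords,
    // warns on obsolete ones, and checks min/max compaction thresholds.
    cfProps.validate();
}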