Example 11 with CFMetaData

use of org.apache.cassandra.config.CFMetaData in project eiger by wlloyd.

In the class ThriftValidationTest, the method testColumnNameEqualToKeyAlias:

@Test
public void testColumnNameEqualToKeyAlias() {
    CFMetaData metaData = Schema.instance.getCFMetaData("Keyspace1", "Standard1");
    CfDef newMetadata = metaData.toThrift();
    boolean gotException = false;
    // set key_alias to "id"
    newMetadata.setKey_alias(AsciiType.instance.decompose("id"));
    // should not throw IRE here
    try {
        ThriftValidation.validateCfDef(newMetadata, metaData);
    } catch (InvalidRequestException e) {
        gotException = true;
    }
    assert !gotException : "got unexpected InvalidRequestException";
    // add a column with name = "id"
    newMetadata.addToColumn_metadata(new ColumnDef(UTF8Type.instance.decompose("id"), "org.apache.cassandra.db.marshal.UTF8Type"));
    gotException = false;
    try {
        ThriftValidation.validateCfDef(newMetadata, metaData);
    } catch (InvalidRequestException e) {
        gotException = true;
    }
    assert gotException : "expected InvalidRequestException but none was thrown";
}
Also used : CFMetaData(org.apache.cassandra.config.CFMetaData) Test(org.junit.Test)
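
The try/catch flag pattern above is repeated once for the accepted CfDef and once for the rejected one. A minimal sketch of a hypothetical helper (not part of the original test) that folds that boilerplate into a single call:

private static void assertValidation(CfDef def, CFMetaData current, boolean expectFailure) {
    try {
        // run the same validation the test exercises
        ThriftValidation.validateCfDef(def, current);
        assert !expectFailure : "expected InvalidRequestException but none was thrown";
    } catch (InvalidRequestException e) {
        assert expectFailure : "got unexpected InvalidRequestException";
    }
}

With such a helper the test body reduces to assertValidation(newMetadata, metaData, false) after setting the key alias and assertValidation(newMetadata, metaData, true) after adding the colliding column definition.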

Example 12 with CFMetaData

use of org.apache.cassandra.config.CFMetaData in project brisk by riptano.

In the class SchemaManagerService, the method buildTable:

private Table buildTable(CfDef cfDef) {
    Table table = new Table();
    table.setDbName(cfDef.keyspace);
    table.setTableName(cfDef.name);
    table.setTableType(TableType.EXTERNAL_TABLE.toString());
    table.putToParameters("EXTERNAL", "TRUE");
    table.putToParameters("cassandra.ks.name", cfDef.keyspace);
    table.putToParameters("cassandra.cf.name", cfDef.name);
    table.putToParameters("cassandra.slice.predicate.size", "100");
    table.putToParameters("storage_handler", "org.apache.hadoop.hive.cassandra.CassandraStorageHandler");
    table.setPartitionKeys(new ArrayList<FieldSchema>());
    // cassandra.column.mapping
    StorageDescriptor sd = new StorageDescriptor();
    sd.setInputFormat("org.apache.hadoop.hive.cassandra.input.HiveCassandraStandardColumnInputFormat");
    sd.setOutputFormat("org.apache.hadoop.hive.cassandra.output.HiveCassandraOutputFormat");
    sd.setParameters(new HashMap<String, String>());
    try {
        sd.setLocation(warehouse.getDefaultTablePath(cfDef.keyspace, cfDef.name).toString());
    } catch (MetaException me) {
        log.error("could not build path information correctly", me);
    }
    SerDeInfo serde = new SerDeInfo();
    serde.setSerializationLib("org.apache.hadoop.hive.cassandra.serde.CassandraColumnSerDe");
    serde.putToParameters("serialization.format", "1");
    StringBuilder mapping = new StringBuilder();
    StringBuilder validator = new StringBuilder();
    try {
        CFMetaData cfm = CFMetaData.fromThrift(cfDef);
        AbstractType keyValidator = cfDef.key_validation_class != null ? TypeParser.parse(cfDef.key_validation_class) : BytesType.instance;
        addTypeToStorageDescriptor(sd, ByteBufferUtil.bytes("row_key"), keyValidator, keyValidator);
        mapping.append(":key");
        validator.append(keyValidator.toString());
        for (ColumnDef column : cfDef.getColumn_metadata()) {
            addTypeToStorageDescriptor(sd, column.name, TypeParser.parse(cfDef.comparator_type), TypeParser.parse(column.getValidation_class()));
            try {
                mapping.append(",");
                mapping.append(ByteBufferUtil.string(column.name));
                validator.append(",");
                validator.append(column.getValidation_class());
            } catch (CharacterCodingException e) {
                log.error("could not build column mapping correctly", e);
            }
        }
        serde.putToParameters("cassandra.columns.mapping", mapping.toString());
        serde.putToParameters("cassandra.cf.validatorType", validator.toString());
        sd.setSerdeInfo(serde);
    } catch (ConfigurationException ce) {
        throw new CassandraHiveMetaStoreException("Problem converting comparator type: " + cfDef.comparator_type, ce);
    } catch (InvalidRequestException ire) {
        throw new CassandraHiveMetaStoreException("Problem parsing CfDef: " + cfDef.name, ire);
    }
    table.setSd(sd);
    if (log.isDebugEnabled())
        log.debug("constructed table for CF:{} {}", cfDef.name, table.toString());
    return table;
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) ColumnDef(org.apache.cassandra.thrift.ColumnDef) CharacterCodingException(java.nio.charset.CharacterCodingException) ConfigurationException(org.apache.cassandra.config.ConfigurationException) AbstractType(org.apache.cassandra.db.marshal.AbstractType) CFMetaData(org.apache.cassandra.config.CFMetaData) InvalidRequestException(org.apache.cassandra.thrift.InvalidRequestException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
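
buildTable hands each column to addTypeToStorageDescriptor, which is not shown in this snippet. A rough sketch of what such a helper might do, assuming the Hive column type is derived from the Cassandra validator (the mapping below is illustrative, not the brisk source):

private void addTypeToStorageDescriptor(StorageDescriptor sd, ByteBuffer name, AbstractType comparator, AbstractType validator) {
    try {
        // register the column with Hive, naming it after the decoded Cassandra column name;
        // the comparator could be used to render non-UTF8 names, this sketch assumes readable ones
        sd.addToCols(new FieldSchema(ByteBufferUtil.string(name), hiveTypeFor(validator), "derived from " + validator.getClass().getSimpleName()));
    } catch (CharacterCodingException e) {
        log.error("could not decode column name", e);
    }
}

// illustrative mapping only; the real handler may cover more marshal types
private String hiveTypeFor(AbstractType validator) {
    if (validator instanceof LongType)
        return "bigint";
    // UTF8Type, AsciiType and anything unrecognised fall back to string
    return "string";
}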

Example 13 with CFMetaData

use of org.apache.cassandra.config.CFMetaData in project brisk by riptano.

In the class BriskErrorServer, the method describe_keyspace:

public KsDef describe_keyspace(String table) throws NotFoundException, InvalidRequestException, TException {
    KSMetaData ksm = DatabaseDescriptor.getTableDefinition(table);
    if (ksm == null)
        throw new NotFoundException();
    List<CfDef> cfDefs = new ArrayList<CfDef>();
    for (CFMetaData cfm : ksm.cfMetaData().values()) cfDefs.add(CFMetaData.convertToThrift(cfm));
    KsDef ksdef = new KsDef(ksm.name, ksm.strategyClass.getName(), cfDefs);
    ksdef.setStrategy_options(ksm.strategyOptions);
    return ksdef;
}
Also used : KSMetaData(org.apache.cassandra.config.KSMetaData) CFMetaData(org.apache.cassandra.config.CFMetaData)
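
A Thrift client consuming this response can read the column family definitions straight off the returned struct; a minimal usage sketch, assuming server is a handle to the Thrift service:

KsDef ksDef = server.describe_keyspace("Keyspace1");
for (CfDef cfDef : ksDef.getCf_defs())
    // each CfDef carries the full Thrift view of one column family
    System.out.println(ksDef.getName() + "." + cfDef.getName());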

Example 14 with CFMetaData

use of org.apache.cassandra.config.CFMetaData in project titan by thinkaurelius.

In the class CassandraEmbeddedKeyColumnValueStore, the method getKeySlice:

/**
     * Create a RangeSliceCommand and run it against the StorageProxy.
     * <p>
     * To match the behavior of the standard Cassandra thrift API endpoint, the
     * {@code nowMillis} argument should be the number of milliseconds since the
     * UNIX Epoch (e.g. System.currentTimeMillis() or equivalent obtained
     * through a {@link TimestampProvider}). This is per
     * {@link org.apache.cassandra.thrift.CassandraServer#get_range_slices(ColumnParent, SlicePredicate, KeyRange, ConsistencyLevel)},
     * which passes the server's System.currentTimeMillis() to the
     * {@code RangeSliceCommand} constructor.
     */
private List<Row> getKeySlice(Token start, Token end, @Nullable SliceQuery sliceQuery, int pageSize, long nowMillis) throws BackendException {
    IPartitioner partitioner = StorageService.getPartitioner();
    SliceRange columnSlice = new SliceRange();
    if (sliceQuery == null) {
        columnSlice.setStart(ArrayUtils.EMPTY_BYTE_ARRAY).setFinish(ArrayUtils.EMPTY_BYTE_ARRAY).setCount(5);
    } else {
        columnSlice.setStart(sliceQuery.getSliceStart().asByteBuffer()).setFinish(sliceQuery.getSliceEnd().asByteBuffer()).setCount(sliceQuery.hasLimit() ? sliceQuery.getLimit() : Integer.MAX_VALUE);
    }
    /* Note: we need to fetch columns for each row as well to remove "range ghosts" */
    SlicePredicate predicate = new SlicePredicate().setSlice_range(columnSlice);
    RowPosition startPosition = start.minKeyBound(partitioner);
    RowPosition endPosition = end.minKeyBound(partitioner);
    List<Row> rows;
    try {
        CFMetaData cfm = Schema.instance.getCFMetaData(keyspace, columnFamily);
        IDiskAtomFilter filter = ThriftValidation.asIFilter(predicate, cfm, null);
        RangeSliceCommand cmd = new RangeSliceCommand(keyspace, columnFamily, nowMillis, filter, new Bounds<RowPosition>(startPosition, endPosition), pageSize);
        rows = StorageProxy.getRangeSlice(cmd, ConsistencyLevel.QUORUM);
    } catch (Exception e) {
        throw new PermanentBackendException(e);
    }
    return rows;
}
Also used : IDiskAtomFilter(org.apache.cassandra.db.filter.IDiskAtomFilter) SliceRange(org.apache.cassandra.thrift.SliceRange) SlicePredicate(org.apache.cassandra.thrift.SlicePredicate) CFMetaData(org.apache.cassandra.config.CFMetaData) IsBootstrappingException(org.apache.cassandra.exceptions.IsBootstrappingException) InvalidRequestException(org.apache.cassandra.exceptions.InvalidRequestException) RequestTimeoutException(org.apache.cassandra.exceptions.RequestTimeoutException) UnavailableException(org.apache.cassandra.exceptions.UnavailableException)
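
A minimal sketch of how getKeySlice might be called from inside the same class, with no column filter and a page size of 100 (the key literals and arguments are illustrative, not taken from the Titan source):

IPartitioner partitioner = StorageService.getPartitioner();
// tokens delimiting the slice; in practice these come from the key range being scanned
Token start = partitioner.getToken(ByteBufferUtil.bytes("begin"));
Token end = partitioner.getToken(ByteBufferUtil.bytes("end"));
// nowMillis is the caller's wall-clock time, per the javadoc above
List<Row> rows = getKeySlice(start, end, null, 100, System.currentTimeMillis());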

Example 15 with CFMetaData

use of org.apache.cassandra.config.CFMetaData in project eiger by wlloyd.

In the class SSTableImport, the method addColumnsToCF:

/**
     * Add columns to a column family.
     *
     * @param row the columns associated with a row
     * @param superName name of the super column if any
     * @param cfamily the column family to add columns to
     */
private static void addColumnsToCF(List<?> row, ByteBuffer superName, ColumnFamily cfamily) {
    CFMetaData cfm = cfamily.metadata();
    assert cfm != null;
    for (Object c : row) {
        JsonColumn col = new JsonColumn<List>((List) c, cfm, (superName != null));
        QueryPath path = new QueryPath(cfm.cfName, superName, col.getName());
        if (col.isExpiring()) {
            cfamily.addColumn(null, new ExpiringColumn(col.getName(), col.getValue(), col.timestamp, col.ttl, col.localExpirationTime));
        } else if (col.isCounter()) {
            cfamily.addColumn(null, new CounterColumn(col.getName(), col.getValue(), col.timestamp, col.timestampOfLastDelete));
        } else if (col.isDeleted()) {
            cfamily.addTombstone(path, col.getValue(), col.timestamp);
        } else {
            cfamily.addColumn(path, col.getValue(), col.timestamp);
        }
    }
}
Also used : QueryPath(org.apache.cassandra.db.filter.QueryPath) CFMetaData(org.apache.cassandra.config.CFMetaData)
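
Each element of row is itself a small list describing one column. A hedged sketch of driving the method directly, assuming the common [name, value, timestamp] column shape of the json2sstable input, where an optional trailing flag such as "d" selects the tombstone branch (the exact name/value encoding depends on the column family's comparator and validator, and cfamily is a ColumnFamily already built for the target metadata):

// illustrative input only: one regular column and one tombstone for a standard (non-super) CF
List<List<?>> columns = new ArrayList<List<?>>();
columns.add(Arrays.asList("col1", "value1", 1000L));
columns.add(Arrays.asList("col2", "value2", 1000L, "d"));
addColumnsToCF(columns, null, cfamily);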

Aggregations

CFMetaData (org.apache.cassandra.config.CFMetaData) 22
ByteBuffer (java.nio.ByteBuffer) 4
ColumnDefinition (org.apache.cassandra.config.ColumnDefinition) 3
ConfigurationException (org.apache.cassandra.config.ConfigurationException) 3
AbstractType (org.apache.cassandra.db.marshal.AbstractType) 3
InvalidRequestException (org.apache.cassandra.thrift.InvalidRequestException) 3
Test (org.junit.Test) 3
KSMetaData (org.apache.cassandra.config.KSMetaData) 2
IMutation (org.apache.cassandra.db.IMutation) 2
RowMutation (org.apache.cassandra.db.RowMutation) 2
QueryPath (org.apache.cassandra.db.filter.QueryPath) 2
File (java.io.File) 1
IOException (java.io.IOException) 1
CharacterCodingException (java.nio.charset.CharacterCodingException) 1
Map (java.util.Map) 1
SortedMap (java.util.SortedMap) 1
TreeMap (java.util.TreeMap) 1
UUID (java.util.UUID) 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) 1
DatabaseDescriptor (org.apache.cassandra.config.DatabaseDescriptor) 1