
Example 41 with AbstractType

use of org.apache.cassandra.db.marshal.AbstractType in project cassandra by apache.

The class DropAggregateStatement, method apply.

public Keyspaces apply(Keyspaces schema) {
    String name = argumentsSpeficied ? format("%s.%s(%s)", keyspaceName, aggregateName, join(", ", transform(arguments, CQL3Type.Raw::toString))) : format("%s.%s", keyspaceName, aggregateName);
    KeyspaceMetadata keyspace = schema.getNullable(keyspaceName);
    if (null == keyspace) {
        if (ifExists)
            return schema;
        throw ire("Aggregate '%s' doesn't exist", name);
    }
    Collection<Function> aggregates = keyspace.functions.get(new FunctionName(keyspaceName, aggregateName));
    if (aggregates.size() > 1 && !argumentsSpeficied) {
        throw ire("'DROP AGGREGATE %s' matches multiple function definitions; " + "specify the argument types by issuing a statement like " + "'DROP AGGREGATE %s (type, type, ...)'. You can use cqlsh " + "'DESCRIBE AGGREGATE %s' command to find all overloads", aggregateName, aggregateName, aggregateName);
    }
    arguments.stream().filter(raw -> !raw.isTuple() && raw.isFrozen()).findFirst().ifPresent(t -> {
        throw ire("Argument '%s' cannot be frozen; remove frozen<> modifier from '%s'", t, t);
    });
    List<AbstractType<?>> argumentTypes = prepareArgumentTypes(keyspace.types);
    Predicate<Function> filter = Functions.Filter.UDA;
    if (argumentsSpeficied)
        filter = filter.and(f -> Functions.typesMatch(f.argTypes(), argumentTypes));
    Function aggregate = aggregates.stream().filter(filter).findAny().orElse(null);
    if (null == aggregate) {
        if (ifExists)
            return schema;
        throw ire("Aggregate '%s' doesn't exist", name);
    }
    return schema.withAddedOrUpdated(keyspace.withSwapped(keyspace.functions.without(aggregate)));
}
Also used : CQL3Type(org.apache.cassandra.cql3.CQL3Type) Function(org.apache.cassandra.cql3.functions.Function) FunctionName(org.apache.cassandra.cql3.functions.FunctionName) AbstractType(org.apache.cassandra.db.marshal.AbstractType)
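
As a side note, a minimal hypothetical sketch (the class AggregateSignatureSketch and its main method are illustrative, not part of Cassandra) of why the statement resolves argument types before matching overloads and rejects frozen<> markers up front: the multi-cell and frozen variants of a collection are distinct AbstractType instances, so argument lists only line up when both sides are expressed in the same frozen/non-frozen form.

import java.util.Arrays;
import java.util.List;

import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.ListType;
import org.apache.cassandra.db.marshal.UTF8Type;

public class AggregateSignatureSketch {
    public static void main(String[] args) {
        // list<int> as a multi-cell type vs. its frozen counterpart: two different AbstractType instances
        AbstractType<?> multiCellList = ListType.getInstance(Int32Type.instance, true);
        AbstractType<?> frozenList = ListType.getInstance(Int32Type.instance, false);

        List<AbstractType<?>> declared = Arrays.asList(UTF8Type.instance, multiCellList);
        List<AbstractType<?>> withFrozenArg = Arrays.asList(UTF8Type.instance, frozenList);

        // false: a frozen list in the argument list does not match the declared multi-cell list
        System.out.println(declared.equals(withFrozenArg));
        // expected true: freeze() resolves the multi-cell list to the interned frozen instance
        System.out.println(multiCellList.freeze().equals(frozenList));
    }
}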

Example 42 with AbstractType

use of org.apache.cassandra.db.marshal.AbstractType in project cassandra by apache.

The class CassandraGenerators, method createColumnDefinition.

private static ColumnMetadata createColumnDefinition(String ks, String table, ColumnMetadata.Kind kind, Set<String> createdColumnNames, /* This is mutated to check for collisions, so has a side effect outside of normal random generation */
RandomnessSource rnd) {
    Gen<AbstractType<?>> typeGen = AbstractTypeGenerators.typeGen();
    switch(kind) {
        // empty type is also not supported, so filter out
        case PARTITION_KEY:
        case CLUSTERING:
            typeGen = Generators.filter(typeGen, t -> t != EmptyType.instance).map(AbstractType::freeze);
            break;
    }
    if (kind == ColumnMetadata.Kind.CLUSTERING) {
        // when working on a clustering column, add in reversed types periodically
        typeGen = allowReversed(typeGen);
    }
    // filter for unique names
    String str;
    while (!createdColumnNames.add(str = IDENTIFIER_GEN.generate(rnd))) {
    }
    ColumnIdentifier name = new ColumnIdentifier(str, true);
    int position = !kind.isPrimaryKeyKind() ? -1 : (int) rnd.next(Constraint.between(0, 30));
    return new ColumnMetadata(ks, table, name, typeGen.generate(rnd), position, kind);
}
Also used : ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) AbstractType(org.apache.cassandra.db.marshal.AbstractType) ColumnIdentifier(org.apache.cassandra.cql3.ColumnIdentifier) Constraint(org.quicktheories.impl.Constraint)
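
A short hedged sketch of the two AbstractType manipulations the generator leans on (ClusteringTypeSketch is an illustrative name, not part of the project): freeze() clears the multi-cell flag, which is why partition-key and clustering types are frozen above, and ReversedType is how descending clustering order is modelled, which is presumably what allowReversed mixes in.

import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.ListType;
import org.apache.cassandra.db.marshal.ReversedType;

public class ClusteringTypeSketch {
    public static void main(String[] args) {
        // a multi-cell list<int> has to be frozen before it can serve as a primary-key column type
        AbstractType<?> list = ListType.getInstance(Int32Type.instance, true);
        System.out.println(list.isMultiCell());           // true
        System.out.println(list.freeze().isMultiCell());  // false

        // reversed (DESC) clustering is represented as a wrapper type around the base type
        AbstractType<?> reversed = ReversedType.getInstance(Int32Type.instance);
        System.out.println(reversed.isReversed());        // true
    }
}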

Example 43 with AbstractType

use of org.apache.cassandra.db.marshal.AbstractType in project cassandra by apache.

The class SSTableHeaderFixTest, method variousDroppedUserTypes.

@Test
public void variousDroppedUserTypes() throws Exception {
    File dir = temporaryFolder;
    TableMetadata.Builder cols = TableMetadata.builder("ks", "cf").addPartitionKeyColumn("pk", udtPK).addClusteringColumn("ck", udtCK);
    ColSpec[] colSpecs = new ColSpec[] { // 'frozen<udt>' / live
    new ColSpec("frozen_udt_as_frozen_udt_live", makeUDT2("frozen_udt_as_frozen_udt_live", false), makeUDT2("frozen_udt_as_frozen_udt_live", false), false, false), // 'frozen<udt>' / live / as 'udt'
    new ColSpec("frozen_udt_as_unfrozen_udt_live", makeUDT2("frozen_udt_as_unfrozen_udt_live", false), makeUDT2("frozen_udt_as_unfrozen_udt_live", true), false, true), // 'frozen<udt>' / dropped
    new ColSpec("frozen_udt_as_frozen_udt_dropped", makeUDT2("frozen_udt_as_frozen_udt_dropped", true).freezeNestedMulticellTypes().freeze().expandUserTypes(), makeUDT2("frozen_udt_as_frozen_udt_dropped", false), makeUDT2("frozen_udt_as_frozen_udt_dropped", false), true, false), // 'frozen<udt>' / dropped / as 'udt'
    new ColSpec("frozen_udt_as_unfrozen_udt_dropped", makeUDT2("frozen_udt_as_unfrozen_udt_dropped", true).freezeNestedMulticellTypes().freeze().expandUserTypes(), makeUDT2("frozen_udt_as_unfrozen_udt_dropped", true), makeUDT2("frozen_udt_as_unfrozen_udt_dropped", false), true, true), // 'udt' / live
    new ColSpec("unfrozen_udt_as_unfrozen_udt_live", makeUDT2("unfrozen_udt_as_unfrozen_udt_live", true), makeUDT2("unfrozen_udt_as_unfrozen_udt_live", true), false, false), // 'frozen<tuple>' as 'TupleType(multiCell=false' (there is nothing like 'FrozenType(TupleType(')
    new ColSpec("frozen_tuple_as_frozen_tuple_live", makeTupleSimple(), makeTupleSimple(), false, false), // 'frozen<tuple>' as 'TupleType(multiCell=false' (there is nothing like 'FrozenType(TupleType(')
    new ColSpec("frozen_tuple_as_frozen_tuple_dropped", makeTupleSimple(), makeTupleSimple(), true, false) };
    // use the initial column type for the serialization header
    Arrays.stream(colSpecs).forEach(c -> cols.addRegularColumn(c.name, c.preFix));
    Map<String, ColSpec> colSpecMap = Arrays.stream(colSpecs).collect(Collectors.toMap(c -> c.name, c -> c));
    File sstable = buildFakeSSTable(dir, 1, cols, c -> {
        ColSpec cs = colSpecMap.get(c.name.toString());
        if (cs == null)
            return c;
        // update the column type in the schema to the "correct" one.
        return c.withNewType(cs.schema);
    });
    Arrays.stream(colSpecs).filter(c -> c.dropped).forEach(c -> {
        ColumnMetadata cd = getColDef(c.name);
        tableMetadata = tableMetadata.unbuild().removeRegularOrStaticColumn(cd.name).recordColumnDrop(cd, FBUtilities.timestampMicros()).build();
    });
    SerializationHeader.Component header = readHeader(sstable);
    for (ColSpec colSpec : colSpecs) {
        AbstractType<?> hdrType = header.getRegularColumns().get(ByteBufferUtil.bytes(colSpec.name));
        assertEquals(colSpec.name, colSpec.preFix, hdrType);
        assertEquals(colSpec.name, colSpec.preFix.isMultiCell(), hdrType.isMultiCell());
    }
    SSTableHeaderFix headerFix = builder().withPath(sstable.toPath()).build();
    headerFix.execute();
    assertFalse(headerFix.hasError());
    assertTrue(headerFix.hasChanges());
    // Verify that all columns to fix are in the updatedColumns set (paranoid, yet)
    Arrays.stream(colSpecs).filter(c -> c.mustFix).forEach(c -> assertTrue("expect " + c.name + " to be updated, but was not (" + updatedColumns + ")", updatedColumns.contains(c.name)));
    // Verify that the number of updated columns matches the expected number of columns to fix
    assertEquals(Arrays.stream(colSpecs).filter(c -> c.mustFix).count(), updatedColumns.size());
    header = readHeader(sstable);
    for (ColSpec colSpec : colSpecs) {
        AbstractType<?> hdrType = header.getRegularColumns().get(ByteBufferUtil.bytes(colSpec.name));
        assertEquals(colSpec.name, colSpec.expect, hdrType);
        assertEquals(colSpec.name, colSpec.expect.isMultiCell(), hdrType.isMultiCell());
    }
}
Also used : TableMetadata(org.apache.cassandra.schema.TableMetadata) Arrays(java.util.Arrays) IndexTarget(org.apache.cassandra.cql3.statements.schema.IndexTarget) File(org.apache.cassandra.io.util.File) AbstractType(org.apache.cassandra.db.marshal.AbstractType) ByteBuffer(java.nio.ByteBuffer) Matcher(java.util.regex.Matcher) Map(java.util.Map) After(org.junit.After) SSTableFormat(org.apache.cassandra.io.sstable.format.SSTableFormat) Assert.fail(org.junit.Assert.fail) DatabaseDescriptor(org.apache.cassandra.config.DatabaseDescriptor) CollectionType(org.apache.cassandra.db.marshal.CollectionType) CompositeType(org.apache.cassandra.db.marshal.CompositeType) SequentialWriter(org.apache.cassandra.io.util.SequentialWriter) Version(org.apache.cassandra.io.sstable.format.Version) FBUtilities(org.apache.cassandra.utils.FBUtilities) MetadataType(org.apache.cassandra.io.sstable.metadata.MetadataType) Set(java.util.Set) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) List(java.util.List) ColumnIdentifier(org.apache.cassandra.cql3.ColumnIdentifier) FloatType(org.apache.cassandra.db.marshal.FloatType) FileUtils(org.apache.cassandra.io.util.FileUtils) Assert.assertFalse(org.junit.Assert.assertFalse) FrozenType(org.apache.cassandra.db.marshal.FrozenType) TableMetadata(org.apache.cassandra.schema.TableMetadata) Pattern(java.util.regex.Pattern) IntStream(java.util.stream.IntStream) ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) SetType(org.apache.cassandra.db.marshal.SetType) FieldIdentifier(org.apache.cassandra.cql3.FieldIdentifier) Function(java.util.function.Function) HashSet(java.util.HashSet) Int32Type(org.apache.cassandra.db.marshal.Int32Type) ListType(org.apache.cassandra.db.marshal.ListType) UTF8Type(org.apache.cassandra.db.marshal.UTF8Type) AbstractCompositeType(org.apache.cassandra.db.marshal.AbstractCompositeType) TupleType(org.apache.cassandra.db.marshal.TupleType) BigFormat(org.apache.cassandra.io.sstable.format.big.BigFormat) SerializationHeader(org.apache.cassandra.db.SerializationHeader) Before(org.junit.Before) IndexMetadata(org.apache.cassandra.schema.IndexMetadata) ByteBufferUtil(org.apache.cassandra.utils.ByteBufferUtil) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) MapType(org.apache.cassandra.db.marshal.MapType) EncodingStats(org.apache.cassandra.db.rows.EncodingStats) Assert(org.junit.Assert) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) UserType(org.apache.cassandra.db.marshal.UserType) ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) SerializationHeader(org.apache.cassandra.db.SerializationHeader) File(org.apache.cassandra.io.util.File) Test(org.junit.Test)
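
A brief hedged sketch of the property the assertions above exercise (HeaderTypeSketch is an illustrative name, not part of the test): a multi-cell user type and its frozen counterpart report different isMultiCell() values, and it is that mismatch between the schema and the type recorded in the sstable's serialization header that SSTableHeaderFix rewrites.

import java.util.Arrays;

import org.apache.cassandra.cql3.FieldIdentifier;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.db.marshal.UserType;
import org.apache.cassandra.utils.ByteBufferUtil;

public class HeaderTypeSketch {
    public static void main(String[] args) {
        // a two-field UDT, first constructed as a multi-cell ("unfrozen") type
        UserType udt = new UserType("ks", ByteBufferUtil.bytes("udt"),
                                    Arrays.asList(FieldIdentifier.forUnquoted("a"),
                                                  FieldIdentifier.forUnquoted("b")),
                                    Arrays.<AbstractType<?>>asList(UTF8Type.instance, Int32Type.instance),
                                    true);
        System.out.println(udt.isMultiCell());           // true
        System.out.println(udt.freeze().isMultiCell());  // false once frozen
    }
}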

Example 44 with AbstractType

use of org.apache.cassandra.db.marshal.AbstractType in project eiger by wlloyd.

The class SliceQueryFilter, method collectReducedColumns.

public void collectReducedColumns(IColumnContainer container, Iterator<IColumn> reducedColumns, int gcBefore) {
    int liveColumns = 0;
    AbstractType comparator = container.getComparator();
    while (reducedColumns.hasNext()) {
        if (liveColumns >= count)
            break;
        IColumn column = reducedColumns.next();
        if (logger.isDebugEnabled())
            logger.debug(String.format("collecting %s of %s: %s", liveColumns, count, column.getString(comparator)));
        // stop once the column name passes the slice's finish bound (in either iteration direction)
        if (finish.remaining() > 0 && ((!reversed && comparator.compare(column.name(), finish) > 0) || (reversed && comparator.compare(column.name(), finish) < 0)))
            break;
        // only count live columns towards the `count` criteria
        if (column.isLive() && (!container.isMarkedForDelete() || column.mostRecentLiveChangeAt() > container.getMarkedForDeleteAt())) {
            liveColumns++;
        }
        // but we need to add all non-gc-able columns to the result for read repair:
        if (QueryFilter.isRelevant(column, container, gcBefore))
            container.addColumn(column);
    }
}
Also used : AbstractType(org.apache.cassandra.db.marshal.AbstractType)
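
A minimal sketch of the comparator check in the loop above (SliceBoundSketch is a made-up name): the container's comparator is the AbstractType of the column names, and compare() on the raw ByteBuffers is what tells the filter it has walked past the finish bound of the slice.

import java.nio.ByteBuffer;

import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.utils.ByteBufferUtil;

public class SliceBoundSketch {
    public static void main(String[] args) {
        UTF8Type comparator = UTF8Type.instance;
        ByteBuffer name = ByteBufferUtil.bytes("col_b");
        ByteBuffer finish = ByteBufferUtil.bytes("col_a");

        boolean reversed = false;
        // same bound test as in collectReducedColumns: an empty finish means "no bound"
        boolean pastEnd = finish.remaining() > 0
                          && ((!reversed && comparator.compare(name, finish) > 0)
                              || (reversed && comparator.compare(name, finish) < 0));
        System.out.println(pastEnd); // true: "col_b" sorts after the forward bound "col_a"
    }
}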

Example 45 with AbstractType

use of org.apache.cassandra.db.marshal.AbstractType in project eiger by wlloyd.

The class ThriftValidation, method validateFilterClauses.

// return true if index_clause contains an indexed column with operator EQ
public static boolean validateFilterClauses(CFMetaData metadata, List<IndexExpression> index_clause) throws InvalidRequestException {
    if (isEmpty(index_clause))
        // no filter to apply
        return false;
    Set<ByteBuffer> indexedColumns = Table.open(metadata.ksName).getColumnFamilyStore(metadata.cfName).indexManager.getIndexedColumns();
    AbstractType nameValidator = ColumnFamily.getComparatorFor(metadata.ksName, metadata.cfName, null);
    boolean isIndexed = false;
    for (IndexExpression expression : index_clause) {
        try {
            nameValidator.validate(expression.column_name);
        } catch (MarshalException me) {
            throw new InvalidRequestException(String.format("[%s]=[%s] failed name validation (%s)", ByteBufferUtil.bytesToHex(expression.column_name), ByteBufferUtil.bytesToHex(expression.value), me.getMessage()));
        }
        AbstractType valueValidator = Schema.instance.getValueValidator(metadata.ksName, metadata.cfName, expression.column_name);
        try {
            valueValidator.validate(expression.value);
        } catch (MarshalException me) {
            throw new InvalidRequestException(String.format("[%s]=[%s] failed value validation (%s)", ByteBufferUtil.bytesToHex(expression.column_name), ByteBufferUtil.bytesToHex(expression.value), me.getMessage()));
        }
        isIndexed |= (expression.op == IndexOperator.EQ) && indexedColumns.contains(expression.column_name);
    }
    return isIndexed;
}
Also used : MarshalException(org.apache.cassandra.db.marshal.MarshalException) AbstractType(org.apache.cassandra.db.marshal.AbstractType) ByteBuffer(java.nio.ByteBuffer)
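
A small hedged sketch of the validate() contract the method relies on (ValidateSketch and the LongType example are illustrative; the MarshalException import follows the eiger-era package shown above): validate throws MarshalException when the raw bytes are not a well-formed value of the type, and the Thrift layer converts that into an InvalidRequestException with the offending bytes rendered as hex.

import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.MarshalException;
import org.apache.cassandra.utils.ByteBufferUtil;

public class ValidateSketch {
    public static void main(String[] args) {
        try {
            // a single byte is not a valid 8-byte long value
            LongType.instance.validate(ByteBufferUtil.bytes("x"));
        } catch (MarshalException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}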

Aggregations

AbstractType (org.apache.cassandra.db.marshal.AbstractType) 51
ByteBuffer (java.nio.ByteBuffer) 20
ColumnMetadata (org.apache.cassandra.schema.ColumnMetadata) 9
TableMetadata (org.apache.cassandra.schema.TableMetadata) 8
List (java.util.List) 6
Map (java.util.Map) 6
CollectionType (org.apache.cassandra.db.marshal.CollectionType) 6
CompositeType (org.apache.cassandra.db.marshal.CompositeType) 6
IOException (java.io.IOException) 5
ArrayList (java.util.ArrayList) 5
Set (java.util.Set) 5
Collectors (java.util.stream.Collectors) 5
CQL3Type (org.apache.cassandra.cql3.CQL3Type) 5
SerializationHeader (org.apache.cassandra.db.SerializationHeader) 5
TupleType (org.apache.cassandra.db.marshal.TupleType) 5
UserType (org.apache.cassandra.db.marshal.UserType) 5
ColumnIdentifier (org.apache.cassandra.cql3.ColumnIdentifier) 4
FieldIdentifier (org.apache.cassandra.cql3.FieldIdentifier) 4
Test (org.junit.Test) 4
AuditLogContext (org.apache.cassandra.audit.AuditLogContext) 3