Search in sources :

Example 1 with RangeBuilder

use of com.netflix.astyanax.util.RangeBuilder in project coprhd-controller by CoprHD.

The following example is taken from the class UserToOrdersMigration, method process.

@Override
public void process() throws MigrationCallbackException {
    // Migrates the legacy "user -> orders" index rows into the new
    // ClassNameTimeSeries-based index CF backing Order.SUBMITTED_BY_USER_ID.
    log.info("Adding new index records for class: {} field: {} annotation: {} name={}", new Object[] { Order.class, Order.SUBMITTED_BY_USER_ID, ClassNameTimeSeries.class.getName(), name });
    DataObjectType doType = TypeMap.getDoType(Order.class);
    ColumnField field = doType.getColumnField(Order.SUBMITTED_BY_USER_ID);
    newIndexCF = field.getIndexCF();
    ColumnFamily<String, IndexColumnName> userToOrders = new ColumnFamily<>(SOURCE_INDEX_CF_NAME, StringSerializer.get(), IndexColumnNameSerializer.get());
    DbClientImpl client = (DbClientImpl) dbClient;
    ks = client.getKeyspace(Order.class);
    MutationBatch mutationBatch = ks.prepareMutationBatch();
    // Number of index columns migrated so far; drives the periodic batch flush.
    long m = 0;
    try {
        // Enumerate row keys only (column limit 0); columns are paged per row below.
        OperationResult<Rows<String, IndexColumnName>> result = ks.prepareQuery(userToOrders).getAllRows().setRowLimit(1000).withColumnRange(new RangeBuilder().setLimit(0).build()).execute();
        ColumnList<IndexColumnName> cols;
        for (Row<String, IndexColumnName> row : result.getResult()) {
            RowQuery<String, IndexColumnName> rowQuery = ks.prepareQuery(userToOrders).getKey(row.getKey()).autoPaginate(true).withColumnRange(new RangeBuilder().setLimit(5).build());
            while (!(cols = rowQuery.execute().getResult()).isEmpty()) {
                for (Column<IndexColumnName> col : cols) {
                    // BUG FIX: previously m was incremented once per *page* (outside this
                    // loop) while the "% 10000" check below ran once per column, so the
                    // periodic flush rarely fired and, when it did, executed repeatedly
                    // for the same page. Count migrated columns instead.
                    m++;
                    String indexKey = row.getKey();
                    String orderId = col.getName().getTwo();
                    ClassNameTimeSeriesIndexColumnName newCol = new ClassNameTimeSeriesIndexColumnName(col.getName().getOne(), orderId, col.getName().getTimeUUID());
                    mutationBatch.withRow(newIndexCF, indexKey).putEmptyColumn(newCol, null);
                    if (m % 10000 == 0) {
                        // Flush every 10k mutations to keep the batch bounded.
                        mutationBatch.execute();
                    }
                }
            }
        }
        // Flush any remaining mutations.
        mutationBatch.execute();
    } catch (Exception e) {
        // NOTE(review): failures are logged and swallowed, so a partial migration is
        // reported as success; consider rethrowing as MigrationCallbackException.
        log.error("Migration to {} failed e=", newIndexCF.getName(), e);
    }
}
Also used : Order(com.emc.storageos.db.client.model.uimodels.Order) ColumnField(com.emc.storageos.db.client.impl.ColumnField) RangeBuilder(com.netflix.astyanax.util.RangeBuilder) ClassNameTimeSeriesIndexColumnName(com.emc.storageos.db.client.impl.ClassNameTimeSeriesIndexColumnName) MigrationCallbackException(com.emc.storageos.svcs.errorhandling.resources.MigrationCallbackException) ColumnFamily(com.netflix.astyanax.model.ColumnFamily) ClassNameTimeSeries(com.emc.storageos.db.client.model.ClassNameTimeSeries) ClassNameTimeSeriesIndexColumnName(com.emc.storageos.db.client.impl.ClassNameTimeSeriesIndexColumnName) IndexColumnName(com.emc.storageos.db.client.impl.IndexColumnName) MutationBatch(com.netflix.astyanax.MutationBatch) DbClientImpl(com.emc.storageos.db.client.impl.DbClientImpl) DataObjectType(com.emc.storageos.db.client.impl.DataObjectType) Rows(com.netflix.astyanax.model.Rows)

Example 2 with RangeBuilder

use of com.netflix.astyanax.util.RangeBuilder in project coprhd-controller by CoprHD.

The following example is taken from the class TimeSeriesType, method getColumnRange.

/**
 * Return the column range for the given time and bucket granularity.
 *
 * @param time target query time; must be in UTC
 * @param granularity granularity of the requested bucket; must not be coarser
 *        than this series' bucket granularity
 * @param pageSize page size (column limit applied to the range)
 * @return a column range covering the bucket that contains {@code time}
 * @throws IllegalArgumentException if the time zone is not UTC, the granularity
 *         is coarser than the series' bucket granularity, or the granularity is
 *         not a supported bucket kind
 */
public ByteBufferRange getColumnRange(DateTime time, TimeBucket granularity, int pageSize) {
    if (time.getZone() != DateTimeZone.UTC) {
        throw new IllegalArgumentException("Invalid timezone");
    }
    if (granularity.ordinal() > _bucketGranularity.ordinal()) {
        throw new IllegalArgumentException("Invalid granularity");
    }
    RangeBuilder builder = new RangeBuilder();
    builder.setLimit(pageSize);
    if (granularity.ordinal() < _bucketGranularity.ordinal()) {
        // Finer than the series' bucket granularity: constrain the range to the
        // [start, end) window of the bucket containing 'time'.
        DateTime start;
        DateTime end;
        switch (granularity) {
            case MONTH:
                start = new DateTime(time.getYear(), time.getMonthOfYear(), 1, 0, 0, DateTimeZone.UTC);
                end = start.plusMonths(1);
                break;
            case DAY:
                start = new DateTime(time.getYear(), time.getMonthOfYear(), time.getDayOfMonth(), 0, 0, DateTimeZone.UTC);
                end = start.plusDays(1);
                break;
            case HOUR:
                start = new DateTime(time.getYear(), time.getMonthOfYear(), time.getDayOfMonth(), time.getHourOfDay(), 0, DateTimeZone.UTC);
                end = start.plusHours(1);
                break;
            case MINUTE:
                start = new DateTime(time.getYear(), time.getMonthOfYear(), time.getDayOfMonth(), time.getHourOfDay(), time.getMinuteOfHour(), DateTimeZone.UTC);
                end = start.plusMinutes(1);
                break;
            case SECOND:
                start = new DateTime(time.getYear(), time.getMonthOfYear(), time.getDayOfMonth(), time.getHourOfDay(), time.getMinuteOfHour(), time.getSecondOfMinute(), DateTimeZone.UTC);
                end = start.plusSeconds(1);
                break;
            default:
                // BUG FIX: previously an unhandled granularity fell through silently
                // with start/end pre-initialized to DateTime.now(), producing a
                // meaningless range. Fail fast instead.
                throw new IllegalArgumentException("Unsupported granularity: " + granularity);
        }
        builder.setStart(TimeUUIDUtils.getTimeUUID(start.getMillis()));
        // End bound is inclusive of the bucket's last millisecond.
        builder.setEnd(createMaxTimeUUID(end.minusMillis(1).getMillis()));
    }
    return builder.build();
}
Also used : RangeBuilder(com.netflix.astyanax.util.RangeBuilder) DateTime(org.joda.time.DateTime)

Example 3 with RangeBuilder

use of com.netflix.astyanax.util.RangeBuilder in project coprhd-controller by CoprHD.

The following example is taken from the class CustomizedDistributedRowLock, method readLockColumns.

/**
 * Read all the lock columns; also reads data columns when withDataColumns(true)
 * was configured and {@code readDataColumns} is set.
 *
 * @param readDataColumns whether to fetch the whole row (lock + data columns)
 *        instead of only the lock-column name range
 * @return map of lock-column name to its timeout value
 * @throws Exception on any query failure
 */
private Map<String, Long> readLockColumns(boolean readDataColumns) throws Exception {
    Map<String, Long> lockColumns = Maps.newLinkedHashMap();
    // Cassandra does not support EACH_QUORUM for reads (it would be meaningless
    // anyway), so downgrade it to LOCAL_QUORUM for this query.
    ConsistencyLevel readLevel = (consistencyLevel == ConsistencyLevel.CL_EACH_QUORUM)
            ? ConsistencyLevel.CL_LOCAL_QUORUM
            : consistencyLevel;
    if (readDataColumns) {
        // Fetch the whole row and split lock columns from data columns.
        columns = new OrderedColumnMap<String>();
        ColumnList<String> wholeRow = keyspace.prepareQuery(columnFamily)
                .setConsistencyLevel(readLevel)
                .getKey(key)
                .execute()
                .getResult();
        for (Column<String> column : wholeRow) {
            if (column.getName().startsWith(prefix)) {
                lockColumns.put(column.getName(), readTimeoutValue(column));
            } else {
                columns.add(column);
            }
        }
    } else {
        // Restrict the read to the lock-column name range [prefix\u0000, prefix\uFFFF].
        ColumnList<String> lockRow = keyspace.prepareQuery(columnFamily)
                .setConsistencyLevel(readLevel)
                .getKey(key)
                .withColumnRange(new RangeBuilder().setStart(prefix + "\u0000").setEnd(prefix + "\uFFFF").build())
                .execute()
                .getResult();
        for (Column<String> column : lockRow) {
            lockColumns.put(column.getName(), readTimeoutValue(column));
        }
    }
    return lockColumns;
}
Also used : ConsistencyLevel(com.netflix.astyanax.model.ConsistencyLevel) RangeBuilder(com.netflix.astyanax.util.RangeBuilder)

Example 4 with RangeBuilder

use of com.netflix.astyanax.util.RangeBuilder in project coprhd-controller by CoprHD.

The following example is taken from the class TimeSeriesIndexMigration, method process.

@Override
public void process() throws MigrationCallbackException {
    // Rebuilds the time-series index for Order.SUBMITTED by walking the legacy
    // tenant->order index and fanning pages out to asynchronous MigrationTasks.
    log.info("Adding new index records for class: {} field: {} annotation: {}", new Object[] { Order.class.getName(), Order.SUBMITTED, TimeSeriesAlternateId.class.getName() });
    ColumnFamily<String, IndexColumnName> tenantToOrder = new ColumnFamily<>(SOURCE_INDEX_CF_NAME, StringSerializer.get(), IndexColumnNameSerializer.get());
    ColumnFamily<String, IndexColumnName> timeseriesIndex = new ColumnFamily<>(SOURCE_INDEX_CF_NAME2, StringSerializer.get(), IndexColumnNameSerializer.get());
    DataObjectType doType = TypeMap.getDoType(Order.class);
    ColumnField field = doType.getColumnField(Order.SUBMITTED);
    ColumnFamily<String, TimeSeriesIndexColumnName> newIndexCF = field.getIndexCF();
    DbClientImpl client = (DbClientImpl) dbClient;
    Keyspace ks = client.getKeyspace(Order.class);
    // Pending async tasks; drained whenever TASK_SIZE has been reached.
    // (Also fixed: use the parameterized type instead of a raw ArrayList.)
    List<CompletableFuture<Void>> tasks = new ArrayList<>(TASK_SIZE);
    try {
        // Enumerate row keys only (column limit 0); columns are paged per row below.
        OperationResult<Rows<String, IndexColumnName>> result = ks.prepareQuery(tenantToOrder).getAllRows().setRowLimit(1000).withColumnRange(new RangeBuilder().setLimit(0).build()).execute();
        for (Row<String, IndexColumnName> row : result.getResult()) {
            RowQuery<String, IndexColumnName> rowQuery = ks.prepareQuery(tenantToOrder).getKey(row.getKey()).autoPaginate(true).withColumnRange(new RangeBuilder().setLimit(5).build());
            ColumnList<IndexColumnName> cols = rowQuery.execute().getResult();
            while (!cols.isEmpty()) {
                if (tasks.size() >= TASK_SIZE) {
                    // Drain the full batch before scheduling more work.
                    CompletableFuture.allOf(tasks.toArray(new CompletableFuture[0])).join();
                    tasks = new ArrayList<>(TASK_SIZE);
                }
                // BUG FIX: previously, when the task list was full, the else-branch only
                // drained the batch and never submitted the CURRENT page of columns, so
                // one page per TASK_SIZE was silently skipped. Always schedule the page.
                tasks.add(CompletableFuture.runAsync(new MigrationTask(ks, row.getKey(), cols, tenantToOrder, timeseriesIndex, newIndexCF)));
                cols = rowQuery.execute().getResult();
            }
        }
        if (!tasks.isEmpty()) {
            CompletableFuture.allOf(tasks.toArray(new CompletableFuture[0])).join();
        }
    } catch (Exception e) {
        // NOTE(review): errors are logged and swallowed, so a failed migration is
        // reported as success; consider rethrowing as MigrationCallbackException.
        log.error("Migration to {} failed e=", newIndexCF.getName(), e);
    }
}
Also used : RangeBuilder(com.netflix.astyanax.util.RangeBuilder) CompositeRangeBuilder(com.netflix.astyanax.serializers.CompositeRangeBuilder) ArrayList(java.util.ArrayList) CompletableFuture(java.util.concurrent.CompletableFuture) Order(com.emc.storageos.db.client.model.uimodels.Order) MigrationCallbackException(com.emc.storageos.svcs.errorhandling.resources.MigrationCallbackException) ConnectionException(com.netflix.astyanax.connectionpool.exceptions.ConnectionException) TimeSeriesAlternateId(com.emc.storageos.db.client.model.TimeSeriesAlternateId) Keyspace(com.netflix.astyanax.Keyspace)

Example 5 with RangeBuilder

use of com.netflix.astyanax.util.RangeBuilder in project coprhd-controller by CoprHD.

The following example is taken from the class DbConsistencyCheckerHelper, method checkIndexingCF.

/**
 * Scan all the indices and related data object records, to find out
 * the index record is existing but the related data object records is missing.
 *
 * @param indexAndCf the index column family (plus keyspace and index type) to scan
 * @param toConsole whether findings are echoed to the console as well as the log
 * @param checkResult accumulator for the corruption counts found by this scan
 * @param isParallel when true, batches are verified on multiple threads
 * @return number of the corrupted rows in this index CF
 * @throws ConnectionException
 */
public void checkIndexingCF(IndexAndCf indexAndCf, boolean toConsole, CheckResult checkResult, boolean isParallel) throws ConnectionException {
    initSchemaVersions();
    String indexCFName = indexAndCf.cf.getName();
    Map<String, ColumnFamily<String, CompositeColumnName>> objCfs = getDataObjectCFs();
    _log.info("Start checking the index CF {} with double confirmed option: {}", indexCFName, doubleConfirmed);
    // Pending work: data-object CF -> (object id -> index entries that point at it).
    // Flushed for verification whenever it grows past INDEX_OBJECTS_BATCH_SIZE.
    Map<ColumnFamily<String, CompositeColumnName>, Map<String, List<IndexEntry>>> objsToCheck = new HashMap<>();
    ColumnFamilyQuery<String, IndexColumnName> query = indexAndCf.keyspace.prepareQuery(indexAndCf.cf);
    // Enumerate row keys only (column limit 0); columns are paged per row below.
    OperationResult<Rows<String, IndexColumnName>> result = query.getAllRows().setRowLimit(dbClient.DEFAULT_PAGE_SIZE).withColumnRange(new RangeBuilder().setLimit(0).build()).execute();
    // Progress accounting: counts index columns scanned since the last progress log.
    int scannedRows = 0;
    long beginTime = System.currentTimeMillis();
    for (Row<String, IndexColumnName> row : result.getResult()) {
        RowQuery<String, IndexColumnName> rowQuery = indexAndCf.keyspace.prepareQuery(indexAndCf.cf).getKey(row.getKey()).autoPaginate(true).withColumnRange(new RangeBuilder().setLimit(dbClient.DEFAULT_PAGE_SIZE).build());
        ColumnList<IndexColumnName> columns;
        while (!(columns = rowQuery.execute().getResult()).isEmpty()) {
            for (Column<IndexColumnName> column : columns) {
                scannedRows++;
                // Decode (class name, object id) out of the index entry; null means
                // the entry could not be parsed and was already reported.
                ObjectEntry objEntry = extractObjectEntryFromIndex(row.getKey(), column.getName(), indexAndCf.indexType, toConsole);
                if (objEntry == null) {
                    continue;
                }
                ColumnFamily<String, CompositeColumnName> objCf = objCfs.get(objEntry.getClassName());
                if (objCf == null) {
                    logMessage(String.format("DataObject does not exist for %s", row.getKey()), true, toConsole);
                    continue;
                }
                if (skipCheckCFs.contains(objCf.getName())) {
                    _log.debug("Skip checking CF {} for index CF {}", objCf.getName(), indexAndCf.cf.getName());
                    continue;
                }
                // Group entries first by target CF, then by target object id.
                Map<String, List<IndexEntry>> objKeysIdxEntryMap = objsToCheck.get(objCf);
                if (objKeysIdxEntryMap == null) {
                    objKeysIdxEntryMap = new HashMap<>();
                    objsToCheck.put(objCf, objKeysIdxEntryMap);
                }
                List<IndexEntry> idxEntries = objKeysIdxEntryMap.get(objEntry.getObjectId());
                if (idxEntries == null) {
                    idxEntries = new ArrayList<>();
                    objKeysIdxEntryMap.put(objEntry.getObjectId(), idxEntries);
                }
                idxEntries.add(new IndexEntry(row.getKey(), column.getName()));
            }
            // Verify a batch once enough object references have accumulated.
            int size = getObjsSize(objsToCheck);
            if (size >= INDEX_OBJECTS_BATCH_SIZE) {
                if (isParallel) {
                    processBatchIndexObjectsWithMultipleThreads(indexAndCf, toConsole, objsToCheck, checkResult);
                } else {
                    processBatchIndexObjects(indexAndCf, toConsole, objsToCheck, checkResult);
                }
                objsToCheck = new HashMap<>();
            }
            // Periodic progress output; the counter and timer reset after each report.
            if (scannedRows >= THRESHHOLD_FOR_OUTPUT_DEBUG) {
                _log.info("{} data objects have been check with time {}", scannedRows, DurationFormatUtils.formatDurationHMS(System.currentTimeMillis() - beginTime));
                scannedRows = 0;
                beginTime = System.currentTimeMillis();
            }
        }
    }
    // Detect whether the DataObject CFs have the records
    // (flushes whatever remained below the batch threshold).
    if (isParallel) {
        processBatchIndexObjectsWithMultipleThreads(indexAndCf, toConsole, objsToCheck, checkResult);
    } else {
        processBatchIndexObjects(indexAndCf, toConsole, objsToCheck, checkResult);
    }
}
Also used : HashMap(java.util.HashMap) RangeBuilder(com.netflix.astyanax.util.RangeBuilder) ColumnFamily(com.netflix.astyanax.model.ColumnFamily) ColumnList(com.netflix.astyanax.model.ColumnList) List(java.util.List) ArrayList(java.util.ArrayList) Rows(com.netflix.astyanax.model.Rows) Map(java.util.Map) HashMap(java.util.HashMap) TreeMap(java.util.TreeMap)

Aggregations

RangeBuilder (com.netflix.astyanax.util.RangeBuilder)5 Order (com.emc.storageos.db.client.model.uimodels.Order)2 MigrationCallbackException (com.emc.storageos.svcs.errorhandling.resources.MigrationCallbackException)2 ColumnFamily (com.netflix.astyanax.model.ColumnFamily)2 Rows (com.netflix.astyanax.model.Rows)2 ArrayList (java.util.ArrayList)2 ClassNameTimeSeriesIndexColumnName (com.emc.storageos.db.client.impl.ClassNameTimeSeriesIndexColumnName)1 ColumnField (com.emc.storageos.db.client.impl.ColumnField)1 DataObjectType (com.emc.storageos.db.client.impl.DataObjectType)1 DbClientImpl (com.emc.storageos.db.client.impl.DbClientImpl)1 IndexColumnName (com.emc.storageos.db.client.impl.IndexColumnName)1 ClassNameTimeSeries (com.emc.storageos.db.client.model.ClassNameTimeSeries)1 TimeSeriesAlternateId (com.emc.storageos.db.client.model.TimeSeriesAlternateId)1 Keyspace (com.netflix.astyanax.Keyspace)1 MutationBatch (com.netflix.astyanax.MutationBatch)1 ConnectionException (com.netflix.astyanax.connectionpool.exceptions.ConnectionException)1 ColumnList (com.netflix.astyanax.model.ColumnList)1 ConsistencyLevel (com.netflix.astyanax.model.ConsistencyLevel)1 CompositeRangeBuilder (com.netflix.astyanax.serializers.CompositeRangeBuilder)1 HashMap (java.util.HashMap)1