Use of org.apache.cassandra.thrift.Mutation in the project eiger by wlloyd:
the class CassandraStorage, method putNext.
/**
 * Converts a Pig tuple of the form (key, {(column,value)...}) for a ColumnFamily,
 * or (key, {supercolumn:{(column,value)...}...}) for a SuperColumnFamily, into a
 * list of Thrift {@code Mutation}s and writes it through the underlying writer.
 *
 * Deletion conventions encoded in the tuple: an empty inner bag deletes the whole
 * super column; a {@code null} value deletes the single named column.
 *
 * @param t tuple of (key, bag-of-pairs)
 * @throws ExecException if a tuple field cannot be read
 * @throws IOException if the tuple shape is wrong or the write fails
 */
public void putNext(Tuple t) throws ExecException, IOException {
    ByteBuffer key = objToBB(t.get(0));
    DefaultDataBag pairs = (DefaultDataBag) t.get(1);
    ArrayList<Mutation> mutationList = new ArrayList<Mutation>();
    // NOTE(review): the returned CfDef was never used in the original; the call is
    // kept in case it has a validation side effect (e.g. unknown CF) — confirm.
    getCfDef(storeSignature);
    // Read the clock once so every column of this row carries the same write
    // timestamp (the original re-read the clock per column, allowing skew).
    long timestampMicros = System.currentTimeMillis() * 1000;
    try {
        for (Tuple pair : pairs) {
            Mutation mutation = new Mutation();
            if (DataType.findType(pair.get(1)) == DataType.BAG) {
                // A bag value means this pair describes a super column.
                ArrayList<org.apache.cassandra.thrift.Column> columns = new ArrayList<org.apache.cassandra.thrift.Column>();
                for (Tuple subcol : (DefaultDataBag) pair.get(1)) {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(subcol.get(0));
                    column.value = objToBB(subcol.get(1));
                    column.setTimestamp(timestampMicros);
                    columns.add(column);
                }
                if (columns.isEmpty()) {
                    // Empty bag => delete the whole super column.
                    mutation.deletion = new Deletion();
                    mutation.deletion.super_column = objToBB(pair.get(0));
                    mutation.deletion.setTimestamp(timestampMicros);
                } else {
                    // Only build the SuperColumn when we actually insert one
                    // (the original constructed it on the deletion path too).
                    org.apache.cassandra.thrift.SuperColumn sc = new org.apache.cassandra.thrift.SuperColumn();
                    sc.name = objToBB(pair.get(0));
                    sc.columns = columns;
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.super_column = sc;
                }
            } else {
                // Assume a plain (column, value) pair since it couldn't be anything else.
                if (pair.get(1) == null) {
                    // Null value => delete that single column by name.
                    mutation.deletion = new Deletion();
                    mutation.deletion.predicate = new org.apache.cassandra.thrift.SlicePredicate();
                    mutation.deletion.predicate.column_names = Arrays.asList(objToBB(pair.get(0)));
                    mutation.deletion.setTimestamp(timestampMicros);
                } else {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(pair.get(0));
                    column.value = objToBB(pair.get(1));
                    column.setTimestamp(timestampMicros);
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.column = column;
                }
            }
            mutationList.add(mutation);
        }
    } catch (ClassCastException e) {
        throw new IOException(e + " Output must be (key, {(column,value)...}) for ColumnFamily or (key, {supercolumn:{(column,value)...}...}) for SuperColumnFamily", e);
    }
    try {
        writer.write(key, mutationList);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before translating to IOException so the
        // caller can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
Use of org.apache.cassandra.thrift.Mutation in the project eiger by wlloyd:
the class FacebookPopulator, method run.
/**
 * Populates one row keyed by this operation's index with a Facebook-style
 * distribution of column counts and value sizes, retrying the batch_mutate up
 * to {@code session.getRetryTimes()} times before reporting an error.
 *
 * Session counters (operations, keys, columnCount, bytes, latency) are updated
 * whether or not the insert ultimately succeeded, matching the original
 * stress-tool accounting.
 *
 * @param clientLibrary client used to issue the batch mutation
 * @throws IOException never thrown directly here; declared for the interface
 */
@Override
public void run(ClientLibrary clientLibrary) throws IOException {
    // Lazily build the shared value / column-count distributions.
    if (values == null)
        values = generateFBValues();
    if (columnCountList == null)
        columnCountList = generateFBColumnCounts();
    // Zero-padded key format, e.g. "%010d" for 10-digit keys.
    String format = "%0" + session.getTotalKeysLength() + "d";
    String rawKey = String.format(format, index);
    ByteBuffer key = ByteBufferUtil.bytes(rawKey);
    List<Column> columns = new ArrayList<Column>();
    int columnCount = getFBColumnCount(key);
    int totalBytes = 0;
    for (int i = 0; i < columnCount; i++) {
        ByteBuffer value = getFBValue();
        // Remaining readable bytes of the value buffer.
        totalBytes += value.limit() - value.position();
        columns.add(new Column(columnName(i, session.timeUUIDComparator)).setValue(value).setTimestamp(FBUtilities.timestampMicros()));
    }
    Map<ByteBuffer, Map<String, List<Mutation>>> records = new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
    records.put(key, getColumnsMutationMap(columns));
    long start = System.currentTimeMillis();
    boolean success = false;
    String exceptionMessage = null;
    // Retry loop: stop early once the batch has been applied.
    for (int t = 0; !success && t < session.getRetryTimes(); t++) {
        try {
            clientLibrary.batch_mutate(records);
            success = true;
        } catch (Exception e) {
            exceptionMessage = getExceptionMessage(e);
            // Dump the stack trace only on the final failed attempt.
            if (t + 1 == session.getRetryTimes()) {
                e.printStackTrace();
            }
        }
    }
    if (!success) {
        error(String.format("Operation [%d] retried %d times - error inserting keys %s %s%n", index, session.getRetryTimes(), rawKey, (exceptionMessage == null) ? "" : "(" + exceptionMessage + ")"));
    }
    session.operations.getAndIncrement();
    session.keys.getAndAdd(1);
    session.columnCount.getAndAdd(columnCount);
    session.bytes.getAndAdd(totalBytes);
    session.latency.getAndAdd(System.currentTimeMillis() - start);
}
Use of org.apache.cassandra.thrift.Mutation in the project brisk by riptano:
the class CassandraStorage, method putNext.
/**
 * Converts a Pig tuple of the form (key, {(column,value)...}) for a ColumnFamily,
 * or (key, {supercolumn:{(column,value)...}...}) for a SuperColumnFamily, into a
 * list of Thrift {@code Mutation}s and writes it through the underlying writer.
 *
 * Column names are serialized with the comparator marshaller; values use the
 * per-column validator when one is configured, otherwise the default validator
 * (with a BytesType special case for Pig's DataByteArray).
 *
 * @param t tuple of (key, bag-of-pairs)
 * @throws ExecException if a tuple field cannot be read
 * @throws IOException if the tuple shape is wrong or the write fails
 */
public void putNext(Tuple t) throws ExecException, IOException {
    ByteBuffer key = objToBB(t.get(0));
    DefaultDataBag pairs = (DefaultDataBag) t.get(1);
    ArrayList<Mutation> mutationList = new ArrayList<Mutation>();
    CfDef cfDef = getCfDef();
    // marshallers: index 0 = comparator (column names), index 1 = default validator (values).
    List<AbstractType> marshallers = getDefaultMarshallers(cfDef);
    Map<ByteBuffer, AbstractType> validators = getValidatorMap(cfDef);
    // Read the clock once so every column of this row carries the same write
    // timestamp (the original re-read the clock per column, allowing skew).
    long timestampMicros = System.currentTimeMillis() * 1000;
    try {
        for (Tuple pair : pairs) {
            Mutation mutation = new Mutation();
            if (DataType.findType(pair.get(1)) == DataType.BAG) {
                // A bag value means this pair describes a super column.
                ArrayList<org.apache.cassandra.thrift.Column> columns = new ArrayList<org.apache.cassandra.thrift.Column>();
                for (Tuple subcol : (DefaultDataBag) pair.get(1)) {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(subcol.get(0));
                    column.value = objToBB(subcol.get(1));
                    column.setTimestamp(timestampMicros);
                    columns.add(column);
                }
                if (columns.isEmpty()) {
                    // Empty bag => delete the whole super column.
                    mutation.deletion = new Deletion();
                    mutation.deletion.super_column = objToBB(pair.get(0));
                    mutation.deletion.setTimestamp(timestampMicros);
                } else {
                    // Only build the SuperColumn when we actually insert one.
                    org.apache.cassandra.thrift.SuperColumn sc = new org.apache.cassandra.thrift.SuperColumn();
                    sc.name = objToBB(pair.get(0));
                    sc.columns = columns;
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.super_column = sc;
                }
            } else {
                // Assume a plain (column, value) pair since it couldn't be anything else.
                if (pair.get(1) == null) {
                    // Null value => delete that single column by name.
                    mutation.deletion = new Deletion();
                    mutation.deletion.predicate = new org.apache.cassandra.thrift.SlicePredicate();
                    mutation.deletion.predicate.column_names = Arrays.asList(objToBB(pair.get(0)));
                    mutation.deletion.setTimestamp(timestampMicros);
                } else {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = marshallers.get(0).decompose((pair.get(0)));
                    // Single lookup (the original queried the map twice) and
                    // explicit braces to remove the dangling-else hazard.
                    AbstractType validator = validators.get(column.name);
                    if (validator != null) {
                        column.value = validator.decompose(pair.get(1));
                    } else if (marshallers.get(1) instanceof BytesType) {
                        // Special case BytesType to convert DataByteArray into ByteBuffer.
                        column.value = objToBB(pair.get(1));
                    } else {
                        column.value = marshallers.get(1).decompose(pair.get(1));
                    }
                    column.setTimestamp(timestampMicros);
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.column = column;
                }
            }
            mutationList.add(mutation);
        }
    } catch (ClassCastException e) {
        // Preserve the cause (the original dropped it).
        throw new IOException(e + " Output must be (key, {(column,value)...}) for ColumnFamily or (key, {supercolumn:{(column,value)...}...}) for SuperColumnFamily", e);
    }
    try {
        writer.write(key, mutationList);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before translating to IOException so the
        // caller can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
Use of org.apache.cassandra.thrift.Mutation in the project brisk by riptano:
the class HistoricalPriceInserter, method run.
/**
 * Inserts one historical trading day of prices: a single batch mutation with
 * one row per ticker, keyed by ticker symbol and dated {@code today - index}
 * days. Retries the batch up to {@code session.getRetryTimes()} times; session
 * counters are updated regardless of the final outcome.
 *
 * @param client Thrift client used to issue the batch mutation
 * @throws IOException declared for the interface; not thrown directly here
 */
public void run(Client client) throws IOException {
    // Build the batch: one stock price per day, one row per ticker.
    Map<ByteBuffer, Map<String, List<Mutation>>> record = new HashMap<ByteBuffer, Map<String, List<Mutation>>>(tickers.length);
    LocalDate histDate = today.minusDays(index);
    ByteBuffer histDateBuf = ByteBufferUtil.bytes(histDate.toString("yyyy-MM-dd"));
    for (String ticker : tickers) {
        record.put(ByteBufferUtil.bytes(ticker), genDaysPrices(histDateBuf));
    }
    long startMillis = System.currentTimeMillis();
    String lastError = null;
    boolean inserted = false;
    // Keep retrying until the batch lands or the attempt budget is spent.
    for (int attempt = 0; !inserted && attempt < session.getRetryTimes(); attempt++) {
        try {
            client.batch_mutate(record, session.getConsistencyLevel());
            inserted = true;
        } catch (Exception e) {
            lastError = getExceptionMessage(e);
        }
    }
    if (!inserted) {
        error(String.format("Operation [%d] retried %d times - error inserting key %s %s%n", index, session.getRetryTimes(), histDate, (lastError == null) ? "" : "(" + lastError + ")"));
    }
    // Stress-tool bookkeeping, recorded whether or not the insert succeeded.
    session.operations.getAndIncrement();
    session.keys.addAndGet(tickers.length);
    session.latency.getAndAdd(System.currentTimeMillis() - startMillis);
}
Use of org.apache.cassandra.thrift.Mutation in the project scale7-pelops by s7:
the class Mutator, method deleteColumns.
/**
 * Queues a deletion of the named columns and/or super columns from one row,
 * using this mutator's timestamp. The deletion is appended to the pending
 * mutation list for the given column family and executed on the next flush.
 *
 * @param colFamily the column family to delete from
 * @param rowKey the key of the row to modify
 * @param colNames the column and/or super column names to delete
 * @return this mutator, for call chaining
 */
public Mutator deleteColumns(String colFamily, Bytes rowKey, List<Bytes> colNames) {
    // Validate inputs before constructing any Thrift structures.
    safeGetRowKey(rowKey);
    validateColumnNames(colNames);

    // Name the doomed columns via a slice predicate.
    SlicePredicate predicate = new SlicePredicate();
    predicate.setColumn_names(Bytes.transformBytesToList(colNames));

    // Wrap the predicate in a timestamped deletion.
    Deletion deletion = new Deletion();
    deletion.setPredicate(predicate);
    deletion.setTimestamp(timestamp);

    Mutation mutation = new Mutation();
    mutation.setDeletion(deletion);
    getMutationList(colFamily, rowKey).add(mutation);
    return this;
}
Aggregations