Use of org.apache.cassandra.thrift.ColumnOrSuperColumn in project eiger by wlloyd.
The class CassandraStorage, method putNext:
public void putNext(Tuple t) throws ExecException, IOException {
    ByteBuffer key = objToBB(t.get(0));
    DefaultDataBag pairs = (DefaultDataBag) t.get(1);
    ArrayList<Mutation> mutationList = new ArrayList<Mutation>();
    CfDef cfDef = getCfDef(storeSignature);
    try {
        for (Tuple pair : pairs) {
            Mutation mutation = new Mutation();
            if (DataType.findType(pair.get(1)) == DataType.BAG) { // supercolumn
                org.apache.cassandra.thrift.SuperColumn sc = new org.apache.cassandra.thrift.SuperColumn();
                sc.name = objToBB(pair.get(0));
                ArrayList<org.apache.cassandra.thrift.Column> columns = new ArrayList<org.apache.cassandra.thrift.Column>();
                for (Tuple subcol : (DefaultDataBag) pair.get(1)) {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(subcol.get(0));
                    column.value = objToBB(subcol.get(1));
                    column.setTimestamp(System.currentTimeMillis() * 1000);
                    columns.add(column);
                }
                if (columns.isEmpty()) { // a deletion
                    mutation.deletion = new Deletion();
                    mutation.deletion.super_column = objToBB(pair.get(0));
                    mutation.deletion.setTimestamp(System.currentTimeMillis() * 1000);
                } else {
                    sc.columns = columns;
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.super_column = sc;
                }
            } else { // assume column since it couldn't be anything else
                if (pair.get(1) == null) {
                    mutation.deletion = new Deletion();
                    mutation.deletion.predicate = new org.apache.cassandra.thrift.SlicePredicate();
                    mutation.deletion.predicate.column_names = Arrays.asList(objToBB(pair.get(0)));
                    mutation.deletion.setTimestamp(System.currentTimeMillis() * 1000);
                } else {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(pair.get(0));
                    column.value = objToBB(pair.get(1));
                    column.setTimestamp(System.currentTimeMillis() * 1000);
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.column = column;
                }
            }
            mutationList.add(mutation);
        }
    } catch (ClassCastException e) {
        throw new IOException(e + " Output must be (key, {(column,value)...}) for ColumnFamily or (key, {supercolumn:{(column,value)...}...}) for SuperColumnFamily", e);
    }
    try {
        writer.write(key, mutationList);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
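For reference, the tuple this StoreFunc consumes has the shape (key, {(column, value), ...}); a bag as the second field of a pair marks a super column, and a null value marks a deletion. Below is a minimal sketch, using Pig's TupleFactory and BagFactory, of building such a tuple; the row key and column names are made up for illustration and are not from the project.

import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class PutNextInputSketch {
    public static Tuple exampleRow() throws ExecException {
        TupleFactory tf = TupleFactory.getInstance();
        BagFactory bf = BagFactory.getInstance();
        // One (column, value) pair; a null value here would be turned into a Deletion.
        Tuple pair = tf.newTuple(2);
        pair.set(0, new DataByteArray("name"));
        pair.set(1, new DataByteArray("alice"));
        DataBag columns = bf.newDefaultBag();
        columns.add(pair);
        // The outer tuple handed to putNext: (key, {(column, value), ...}).
        Tuple row = tf.newTuple(2);
        row.set(0, new DataByteArray("row-key-1"));
        row.set(1, columns);
        return row;
    }
}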
Use of org.apache.cassandra.thrift.ColumnOrSuperColumn in project eiger by wlloyd.
The class ColumnOrSuperColumnHelper, method extractEvtAndLvt:
public static EvtAndLvt extractEvtAndLvt(ColumnOrSuperColumn cosc) {
    EvtAndLvt result = null;
    if (cosc.isSetColumn()) {
        result = new EvtAndLvt(cosc.column.earliest_valid_time, cosc.column.latest_valid_time);
    } else if (cosc.isSetCounter_column()) {
        result = new EvtAndLvt(cosc.counter_column.earliest_valid_time, cosc.counter_column.latest_valid_time);
    } else if (cosc.isSetSuper_column()) {
        long maxEarliestValidTime = Long.MIN_VALUE;
        long minLatestValidTime = Long.MAX_VALUE;
        for (Column column : cosc.super_column.columns) {
            maxEarliestValidTime = Math.max(maxEarliestValidTime, column.earliest_valid_time);
            minLatestValidTime = Math.min(minLatestValidTime, column.latest_valid_time);
        }
        result = new EvtAndLvt(maxEarliestValidTime, minLatestValidTime);
    } else {
        long maxEarliestValidTime = Long.MIN_VALUE;
        long minLatestValidTime = Long.MAX_VALUE;
        for (CounterColumn column : cosc.counter_super_column.columns) {
            maxEarliestValidTime = Math.max(maxEarliestValidTime, column.earliest_valid_time);
            minLatestValidTime = Math.min(minLatestValidTime, column.latest_valid_time);
        }
        result = new EvtAndLvt(maxEarliestValidTime, minLatestValidTime);
    }
    logger.trace("extractEVTandLVT(" + toString(cosc) + ")=" + result);
    return result;
}
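The super-column branches intersect the validity intervals of the member columns: the result pairs the latest earliest_valid_time with the earliest latest_valid_time, i.e. the window during which every sub-column is valid. A minimal, self-contained sketch of that arithmetic, with made-up interval values and no dependence on eiger's Thrift classes:

public class EvtLvtIntersectionExample {
    public static void main(String[] args) {
        long[][] columns = { {10, 50}, {20, 40}, {15, 60} }; // {evt, lvt} per sub-column
        long maxEvt = Long.MIN_VALUE;
        long minLvt = Long.MAX_VALUE;
        for (long[] c : columns) {
            maxEvt = Math.max(maxEvt, c[0]); // latest earliest-valid-time
            minLvt = Math.min(minLvt, c[1]); // earliest latest-valid-time
        }
        System.out.println("evt=" + maxEvt + ", lvt=" + minLvt); // prints evt=20, lvt=40
    }
}

Note that the final else falls through to cosc.counter_super_column without an isSet check, so a ColumnOrSuperColumn with no field set would fail there with a NullPointerException.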
Use of org.apache.cassandra.thrift.ColumnOrSuperColumn in project brisk by riptano.
The class CassandraStorage, method putNext:
public void putNext(Tuple t) throws ExecException, IOException {
    ByteBuffer key = objToBB(t.get(0));
    DefaultDataBag pairs = (DefaultDataBag) t.get(1);
    ArrayList<Mutation> mutationList = new ArrayList<Mutation>();
    CfDef cfDef = getCfDef();
    List<AbstractType> marshallers = getDefaultMarshallers(cfDef);
    Map<ByteBuffer, AbstractType> validators = getValidatorMap(cfDef);
    try {
        for (Tuple pair : pairs) {
            Mutation mutation = new Mutation();
            if (DataType.findType(pair.get(1)) == DataType.BAG) { // supercolumn
                org.apache.cassandra.thrift.SuperColumn sc = new org.apache.cassandra.thrift.SuperColumn();
                sc.name = objToBB(pair.get(0));
                ArrayList<org.apache.cassandra.thrift.Column> columns = new ArrayList<org.apache.cassandra.thrift.Column>();
                for (Tuple subcol : (DefaultDataBag) pair.get(1)) {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = objToBB(subcol.get(0));
                    column.value = objToBB(subcol.get(1));
                    column.setTimestamp(System.currentTimeMillis() * 1000);
                    columns.add(column);
                }
                if (columns.isEmpty()) { // a deletion
                    mutation.deletion = new Deletion();
                    mutation.deletion.super_column = objToBB(pair.get(0));
                    mutation.deletion.setTimestamp(System.currentTimeMillis() * 1000);
                } else {
                    sc.columns = columns;
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.super_column = sc;
                }
            } else { // assume column since it couldn't be anything else
                if (pair.get(1) == null) {
                    mutation.deletion = new Deletion();
                    mutation.deletion.predicate = new org.apache.cassandra.thrift.SlicePredicate();
                    mutation.deletion.predicate.column_names = Arrays.asList(objToBB(pair.get(0)));
                    mutation.deletion.setTimestamp(System.currentTimeMillis() * 1000);
                } else {
                    org.apache.cassandra.thrift.Column column = new org.apache.cassandra.thrift.Column();
                    column.name = marshallers.get(0).decompose((pair.get(0)));
                    if (validators.get(column.name) == null)
                        // Have to special case BytesType to convert DataByteArray into ByteBuffer
                        if (marshallers.get(1) instanceof BytesType)
                            column.value = objToBB(pair.get(1));
                        else
                            column.value = marshallers.get(1).decompose(pair.get(1));
                    else
                        column.value = validators.get(column.name).decompose(pair.get(1));
                    column.setTimestamp(System.currentTimeMillis() * 1000);
                    mutation.column_or_supercolumn = new ColumnOrSuperColumn();
                    mutation.column_or_supercolumn.column = column;
                }
            }
            mutationList.add(mutation);
        }
    } catch (ClassCastException e) {
        throw new IOException(e + " Output must be (key, {(column,value)...}) for ColumnFamily or (key, {supercolumn:{(column,value)...}...}) for SuperColumnFamily");
    }
    try {
        writer.write(key, mutationList);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
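Unlike the eiger variant above, this version runs column names through the column family's comparator marshaller and column values through the per-column validator when one is configured, falling back to the default value marshaller (with a BytesType special case for DataByteArray) otherwise. A minimal sketch of what AbstractType.decompose does with two common types, assuming Cassandra's marshal classes of this era behave as shown; the concrete types used in practice come from the CfDef, not from this example:

import java.nio.ByteBuffer;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.UTF8Type;

public class DecomposeExample {
    public static void main(String[] args) {
        // decompose turns a typed Java value into the ByteBuffer a Thrift Column expects.
        ByteBuffer name = UTF8Type.instance.decompose("age"); // comparator marshaller
        ByteBuffer value = LongType.instance.decompose(42L);  // column validator
        System.out.println(name.remaining() + " name bytes, " + value.remaining() + " value bytes");
    }
}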
Use of org.apache.cassandra.thrift.ColumnOrSuperColumn in project scale7-pelops by s7.
The class ColumnOrSuperColumnHelper, method transform:
public static <T> List<T> transform(List<ColumnOrSuperColumn> coscList, FieldAdapter<T> fieldAdapter) {
    List<T> result = new ArrayList<T>(coscList.size());
    for (ColumnOrSuperColumn cosc : coscList) {
        T element = fieldAdapter.getValue(cosc);
        assert element != null : "The " + fieldAdapter.description + " value should not be null";
        result.add(element);
    }
    return result;
}
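transform simply maps a FieldAdapter over every ColumnOrSuperColumn in a slice result, asserting that the requested field is actually present. A self-contained sketch of the same pattern, using a hypothetical Adapter interface in place of Pelops' FieldAdapter (whose exact definition is not shown above):

import java.util.ArrayList;
import java.util.List;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;

public class CoscTransformSketch {
    interface Adapter<T> { T getValue(ColumnOrSuperColumn cosc); }

    static <T> List<T> transform(List<ColumnOrSuperColumn> coscList, Adapter<T> adapter) {
        List<T> result = new ArrayList<T>(coscList.size());
        for (ColumnOrSuperColumn cosc : coscList) {
            result.add(adapter.getValue(cosc));
        }
        return result;
    }

    public static void main(String[] args) {
        List<ColumnOrSuperColumn> coscs = new ArrayList<ColumnOrSuperColumn>();
        coscs.add(new ColumnOrSuperColumn().setColumn(new Column()));
        // Unwrap the plain Column from each wrapper, as done when a get_slice
        // result is known to contain only standard columns.
        List<Column> columns = transform(coscs, new Adapter<Column>() {
            public Column getValue(ColumnOrSuperColumn cosc) { return cosc.column; }
        });
        System.out.println(columns.size());
    }
}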
Use of org.apache.cassandra.thrift.ColumnOrSuperColumn in project eiger by wlloyd.
The class FacebookPopulator, method getColumnsMutationMap:
private Map<String, List<Mutation>> getColumnsMutationMap(List<Column> columns) {
    List<Mutation> mutations = new ArrayList<Mutation>();
    Map<String, List<Mutation>> mutationMap = new HashMap<String, List<Mutation>>();
    for (Column c : columns) {
        ColumnOrSuperColumn column = new ColumnOrSuperColumn().setColumn(c);
        mutations.add(new Mutation().setColumn_or_supercolumn(column));
    }
    mutationMap.put("Standard1", mutations);
    return mutationMap;
}
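The map returned here is keyed by column family name ("Standard1") and still has to be nested under a row key before it can reach the server. A minimal sketch, not taken from FacebookPopulator, of submitting such a map through the stock Cassandra Thrift batch_mutate call; eiger's modified Thrift interface may take additional arguments:

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.Mutation;

public class BatchMutateSketch {
    static void submit(Cassandra.Client client, ByteBuffer rowKey,
                       Map<String, List<Mutation>> mutationsByCf) throws Exception {
        // Outer map: row key -> (column family -> mutations), e.g. {"Standard1": [...]}.
        Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap =
                new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
        mutationMap.put(rowKey, mutationsByCf);
        client.batch_mutate(mutationMap, ConsistencyLevel.ONE);
    }
}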