Usage of me.prettyprint.hector.api.beans.Row in project logprocessing (cloudian):
class RawCdrAccess, method deleteBeforeTS.
// Deletes every row whose key sorts before the given hour from all raw CDR tables.
// hourStamp: epoch-millis cutoff; formatted via m_sdf to produce the exclusive upper
// range key (assumes the date format yields keys in chronological sort order — TODO confirm).
public void deleteBeforeTS(long hourStamp) {
String sLastHour = m_sdf.format(new Date(hourStamp));
// Clean up old data from Raw tables
for (DataAccessObject dao : rawTableObjects) {
// Page through the key range in chunks of HECTOR_MAX_RECORDS, starting from the
// empty key (beginning of the ring) up to sLastHour.
String begRangeKey = "";
boolean bMoreRows = true;
while (bMoreRows) {
List<Row<String, String, String>> rows = dao.getRangeSlice(begRangeKey, sLastHour, "", HECTOR_MAX_RECORDS, false);
if (rows.size() > 0) {
Row<String, String, String> lastRow = rows.get(rows.size() - 1);
// Next page starts (inclusively) at the last key seen; that row is deleted below,
// so the next query re-returns it as a range ghost — presumably that is why a
// single-row result below means we are finished. NOTE(review): this relies on
// Cassandra tombstone/range-ghost behavior — verify against the DAO's query settings.
begRangeKey = lastRow.getKey();
for (Row<String, String, String> row : rows) {
// delete entire row.
dao.delete(row.getKey(), null, StringSerializer.get());
}
if (rows.size() == 1) {
// we are done deleting
bMoreRows = false;
}
} else {
bMoreRows = false;
}
}
}
}
Usage of me.prettyprint.hector.api.beans.Row in project cassandra-tutorial (zznate):
class TutorialRunner, method printResults.
/**
 * Logs the outcome of an executed Hector operation: timing/host banner first, then a
 * type-appropriate rendering of the payload (Rows vs. ColumnSlice vs. anything else).
 *
 * @param result the Hector execution result to display; only {@link QueryResult}
 *               payloads are expanded, other statuses just get the banner
 */
private static void printResults(ResultStatus result) {
    log.info("+-------------------------------------------------");
    log.info("| Result executed in: {} microseconds against host: {}", result.getExecutionTimeMicro(), result.getHostUsed().getName());
    log.info("+-------------------------------------------------");
    // nicer display of Rows vs. HColumn or ColumnSlice
    if (result instanceof QueryResult) {
        QueryResult<?> qr = (QueryResult<?>) result;
        // Fetch the payload once; the original called qr.get() up to three times and
        // also dumped it to stdout with a leftover debug println — both removed.
        Object value = qr.get();
        if (value instanceof Rows) {
            for (Row<?, ?, ?> row : (Rows<?, ?, ?>) value) {
                log.info("| Row key: {}", row.getKey());
                for (Object col : row.getColumnSlice().getColumns()) {
                    log.info("| col: {}", col);
                }
            }
        } else if (value instanceof ColumnSlice) {
            for (Object col : ((ColumnSlice<?, ?>) value).getColumns()) {
                log.info("| col: {}", col);
            }
        } else {
            log.info("| Result: {}", value);
        }
    }
    log.info("+-------------------------------------------------");
}
Usage of me.prettyprint.hector.api.beans.Row in project gora (apache):
class CassandraStore, method addSubColumns.
/**
 * Merges the sub-columns of one column family into the given result set.
 * Gora keys are mapped to Cassandra partition keys only, following the Cassandra
 * logic where column family data is partitioned across nodes based on row key.
 *
 * @param family             name of the column family to read
 * @param cassandraQuery     query describing which family columns to select
 * @param cassandraResultSet result set to merge the fetched columns into
 */
private void addSubColumns(String family, CassandraQuery<K, T> cassandraQuery, CassandraResultSet<K> cassandraResultSet) {
    // Fetch only the family columns the query selects, then fold each returned
    // slice into the result set row keyed by the same partition key.
    for (Row<K, ByteBuffer, ByteBuffer> sliceRow : this.cassandraClient.execute(cassandraQuery, family)) {
        K rowKey = sliceRow.getKey();
        // Reuse the row already present in the result set, or register a fresh one.
        CassandraRow<K> targetRow = cassandraResultSet.getRow(rowKey);
        if (targetRow == null) {
            targetRow = new CassandraRow<>();
            cassandraResultSet.putRow(rowKey, targetRow);
            targetRow.setKey(rowKey);
        }
        // Wrap each fetched column with its family and attach it to the target row.
        for (HColumn<ByteBuffer, ByteBuffer> column : sliceRow.getColumnSlice().getColumns()) {
            CassandraSubColumn subColumn = new CassandraSubColumn();
            subColumn.setValue(column);
            subColumn.setFamily(family);
            targetRow.add(subColumn);
        }
    }
}
Usage of me.prettyprint.hector.api.beans.Row in project logprocessing (cloudian):
class CDRDataAccess, method getCDRsByHour.
/**
 * Queries the hourly-timeline table for CDR entries between the two timestamps,
 * filtered by market and message type, for all MSISDNs.
 *
 * @param minTimestamp inclusive lower bound, epoch millis
 * @param maxTimestamp inclusive upper bound, epoch millis
 * @param market       market code an entry must match exactly
 * @param messageType  message type to match, or MESSAGE_TYPE_ALL for any
 * @param limit        maximum number of entries to return
 * @return up to {@code limit} matching entries; empty on error (errors are logged)
 */
public Vector<CDREntry> getCDRsByHour(long minTimestamp, long maxTimestamp, String market, String messageType, int limit) {
    Vector<CDREntry> vCDRs = new Vector<CDREntry>();
    // Truncate minTime and maxTime to nearest hour to get row keys.
    long minHour = (minTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
    long maxHour = (maxTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
    String strMinHour = m_sdfHourly.format(new Date(minHour));
    String strMaxHour = m_sdfHourly.format(new Date(maxHour));
    int numRecordsRetrieved = 0;
    try {
        // One row holds all the CDRs for one hour. Page through the hour rows at most
        // ROWLIMIT at a time, advancing the range start to the last key seen.
        final int ROWLIMIT = 5;
        String strBegRow = strMinHour;
        // Last key of the previous page; the inclusive range start re-returns it,
        // so it is skipped to avoid double-counting.
        String strPrevLastRow = null;
        while (numRecordsRetrieved < limit) {
            List<Row<String, String, String>> rows = m_daoHourlyTimeline.getRangeSlice(strBegRow, strMaxHour, "", ROWLIMIT, true);
            if (rows == null || rows.isEmpty()) {
                break;
            }
            String lastKey = rows.get(rows.size() - 1).getKey();
            for (Row<String, String, String> row : rows) {
                if (numRecordsRetrieved >= limit) {
                    break;
                }
                // Skip the overlap row re-returned by the inclusive page start.
                if (row.getKey().equals(strPrevLastRow)) {
                    continue;
                }
                List<HColumn<String, String>> cols = row.getColumnSlice().getColumns();
                for (int i = 0; i < cols.size() && numRecordsRetrieved < limit; ++i) {
                    HColumn<String, String> result = cols.get(i);
                    String sEntryID = new String(result.getValue());
                    String sType = m_daoCDREntry.get(sEntryID, COL_TYPE, StringSerializer.get());
                    String sMarket = m_daoCDREntry.get(sEntryID, COL_MARKET, StringSerializer.get());
                    // Column name is "<timestamp>_<suffix>"; the prefix is the entry's epoch millis.
                    String entryTimestampStr = result.getName();
                    String[] colNameParts = entryTimestampStr.split("_");
                    if (colNameParts == null || colNameParts.length < 2) {
                        logger.error("Invalid key from MSISDNTimeline table: " + entryTimestampStr);
                        continue;
                    }
                    long entryTimestamp = Long.parseLong(colNameParts[0]);
                    // Filter out any entries not within the timestamp range (the hour rows
                    // at the edges can contain entries outside [min, max]).
                    if (entryTimestamp > maxTimestamp || entryTimestamp < minTimestamp) {
                        continue;
                    }
                    // Filter by market and message type.
                    if (sType == null || sMarket == null || (sType.compareTo(messageType) != 0 && !messageType.equals(MESSAGE_TYPE_ALL)) || sMarket.compareTo(market) != 0) {
                        // Skip this entry
                        continue;
                    }
                    String sMOIPAddress = m_daoCDREntry.get(sEntryID, COL_MOIPADDRESS, StringSerializer.get());
                    String sMTIPAddress = m_daoCDREntry.get(sEntryID, COL_MTIPADDRESS, StringSerializer.get());
                    String sSenderDomain = m_daoCDREntry.get(sEntryID, COL_SENDERDOMAIN, StringSerializer.get());
                    String sRecipientDomain = m_daoCDREntry.get(sEntryID, COL_RECIPIENTDOMAIN, StringSerializer.get());
                    String sMSISDN = m_daoCDREntry.get(sEntryID, COL_MSISDN, StringSerializer.get());
                    String sTimestamp = m_daoCDREntry.get(sEntryID, COL_TIMESTAMP, StringSerializer.get());
                    CDREntry entry = new CDREntry(sEntryID, sMSISDN, sType, sMOIPAddress, sMTIPAddress, sSenderDomain, sRecipientDomain, sTimestamp, sMarket);
                    vCDRs.add(entry);
                    numRecordsRetrieved++;
                }
            }
            // BUG FIX: the original re-issued the identical query on every iteration
            // (the range never advanced), which duplicated matches up to 'limit' and
            // spun forever when nothing matched. Stop when the page was short or the
            // range made no progress; otherwise advance the start key.
            if (rows.size() < ROWLIMIT || lastKey.equals(strPrevLastRow)) {
                break;
            }
            strPrevLastRow = lastKey;
            strBegRow = lastKey;
        }
    } catch (Exception e) {
        // Preserve the stack trace in the log instead of stdout printStackTrace().
        logger.error("getCDRsByHour failed: " + e.getMessage(), e);
    }
    return vCDRs;
}
Usage of me.prettyprint.hector.api.beans.Row in project logprocessing (cloudian):
class CDRDataAccess, method getChartDataByHour.
/**
 * Builds a chart series of per-hour CDR counts for the given market and message
 * type between the two timestamps. One timeline row holds all CDRs for one hour.
 *
 * @param minTimestamp inclusive lower bound, epoch millis
 * @param maxTimestamp exclusive upper bound, epoch millis (decremented internally)
 * @param market       market code an entry must match exactly
 * @param messageType  message type to match, or MESSAGE_TYPE_ALL for any
 * @param limit        unused; the query is capped by the LIMIT constant
 * @return a single-series list; the series data is empty on error (errors are logged)
 */
public List<ChartSeries> getChartDataByHour(long minTimestamp, long maxTimestamp, String market, String messageType, int limit) {
    List<ChartSeries> chartData = new ArrayList<ChartSeries>();
    ChartSeries series = new ChartSeries("CDR count for all MSISDNs for " + market + ", with type = " + messageType);
    List<ChartValueByTime> chartVals = new ArrayList<ChartValueByTime>();
    try {
        // maxTimestamp is 'inclusive', so pull it back by one millisecond before
        // truncating both bounds to whole hours for the row keys.
        long maxTs = maxTimestamp - 1;
        String strMinHour = m_sdfHourly.format(new Date((minTimestamp / MS_PER_HOUR) * MS_PER_HOUR));
        String strMaxHour = m_sdfHourly.format(new Date((maxTs / MS_PER_HOUR) * MS_PER_HOUR));
        // TODO: query the DB in a loop, getting all records between min and max hour;
        // a single fetch of up to LIMIT rows is taken here.
        List<Row<String, String, String>> rows = m_daoHourlyTimeline.getRangeSlice(strMinHour, strMaxHour, "", LIMIT, false);
        for (Row<String, String, String> hourRow : rows) {
            // Row key is the hourly timestamp. NOTE(review): parsed directly as a long,
            // which assumes m_sdfHourly produces purely numeric keys — confirm the format.
            long hourStamp = Long.parseLong(hourRow.getKey());
            // Count the CDRs in this hour that pass the market/type filter.
            int matchCount = 0;
            for (HColumn<String, String> col : hourRow.getColumnSlice().getColumns()) {
                String entryId = new String(col.getValue());
                String entryType = m_daoCDREntry.get(entryId, COL_TYPE, StringSerializer.get());
                String entryMarket = m_daoCDREntry.get(entryId, COL_MARKET, StringSerializer.get());
                boolean typeMatches = entryType != null && (entryType.compareTo(messageType) == 0 || messageType.equals(MESSAGE_TYPE_ALL));
                boolean marketMatches = entryMarket != null && entryMarket.compareTo(market) == 0;
                if (typeMatches && marketMatches) {
                    matchCount++;
                }
            }
            if (matchCount > 0) {
                chartVals.add(new ChartValueByTime(matchCount, hourStamp));
            }
        }
    } catch (Exception e) {
        logger.error(e.getMessage());
        e.printStackTrace();
    }
    series.setData(chartVals);
    chartData.add(series);
    return chartData;
}
Aggregations