Use of me.prettyprint.hector.api.beans.HColumn in project logprocessing by cloudian.
In class CDRDataAccess, method getCDRsByHour.
// Query the DB to get the CDR entries between a min and max timestamp, for all MSISDNs
// in a particular market and message type.
public Vector<CDREntry> getCDRsByHour(long minTimestamp, long maxTimestamp, String market, String messageType, int limit) {
    Vector<CDREntry> vCDRs = new Vector<CDREntry>();
    // Truncate minTime and maxTime to the nearest hour to get the row keys.
    long minHour = (minTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
    long maxHour = (maxTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
    String strMinHour = m_sdfHourly.format(new Date(minHour));
    String strMaxHour = m_sdfHourly.format(new Date(maxHour));
    int numRecordsRetrieved = 0;
    try {
        // Get a chunk of rows (hourly data).
        // TODO: Treat the last row in the list as a special case, since it will probably not
        // contain all the entries for the hour.
        // TODO: Query the DB in a loop, getting up to 'limit' records between min and max timestamp.
        // Get at most 5 hours of data at a time.
        final int ROWLIMIT = 5;
        while (numRecordsRetrieved < limit) {
            List<Row<String, String, String>> rows = m_daoHourlyTimeline.getRangeSlice(strMinHour, strMaxHour, "", ROWLIMIT, true);
            for (Row<String, String, String> row : rows) {
                // One row contains all the CDRs for a particular hour.
                List<HColumn<String, String>> cols = row.getColumnSlice().getColumns();
                for (HColumn<String, String> result : cols) {
                    String sEntryID = result.getValue();
                    String sType = m_daoCDREntry.get(sEntryID, COL_TYPE, StringSerializer.get());
                    String sMarket = m_daoCDREntry.get(sEntryID, COL_MARKET, StringSerializer.get());
                    // Column names are of the form "<timestamp>_<uuid>".
                    String entryTimestampStr = result.getName();
                    String[] colNameParts = entryTimestampStr.split("_");
                    if (colNameParts == null || colNameParts.length < 2) {
                        logger.error("Invalid key from MSISDNTimeline table: " + entryTimestampStr);
                        continue;
                    }
                    long entryTimestamp = Long.parseLong(colNameParts[0]);
                    // Filter out any entries not within the timestamp range.
                    if (entryTimestamp > maxTimestamp || entryTimestamp < minTimestamp) {
                        continue;
                    }
                    // Filter by market and message type.
                    if (sType == null || sMarket == null || (sType.compareTo(messageType) != 0 && !messageType.equals(MESSAGE_TYPE_ALL)) || sMarket.compareTo(market) != 0) {
                        // Skip this entry.
                        continue;
                    }
                    String sMOIPAddress = m_daoCDREntry.get(sEntryID, COL_MOIPADDRESS, StringSerializer.get());
                    String sMTIPAddress = m_daoCDREntry.get(sEntryID, COL_MTIPADDRESS, StringSerializer.get());
                    String sSenderDomain = m_daoCDREntry.get(sEntryID, COL_SENDERDOMAIN, StringSerializer.get());
                    String sRecipientDomain = m_daoCDREntry.get(sEntryID, COL_RECIPIENTDOMAIN, StringSerializer.get());
                    String sMSISDN = m_daoCDREntry.get(sEntryID, COL_MSISDN, StringSerializer.get());
                    String sTimestamp = m_daoCDREntry.get(sEntryID, COL_TIMESTAMP, StringSerializer.get());
                    CDREntry entry = new CDREntry(sEntryID, sMSISDN, sType, sMOIPAddress, sMTIPAddress, sSenderDomain, sRecipientDomain, sTimestamp, sMarket);
                    vCDRs.add(entry);
                    numRecordsRetrieved++;
                }
            }
            // Pagination across hour rows is not implemented yet (see the TODOs above), so stop
            // after one pass rather than re-querying the same key range indefinitely.
            break;
        }
    } catch (Exception e) {
        logger.error("Failed to fetch CDRs by hour: " + e.getMessage(), e);
    }
    return vCDRs;
}
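The m_daoHourlyTimeline.getRangeSlice(...) helper used above is not part of this excerpt. A minimal sketch of what such a range-slice helper might look like on top of Hector's RangeSlicesQuery follows; the class name, the "HourlyTimeline" column family name, the per-row column cap, and the parameter meanings (inferred from the call sites) are all assumptions, not taken from the project.

import java.util.List;

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;

public class HourlyTimelineDaoSketch {

    private final Keyspace keyspace;

    public HourlyTimelineDaoSketch(String clusterName, String hostPort, String keyspaceName) {
        Cluster cluster = HFactory.getOrCreateCluster(clusterName, new CassandraHostConfigurator(hostPort));
        this.keyspace = HFactory.createKeyspace(keyspaceName, cluster);
    }

    // Fetch up to rowLimit rows whose keys fall between startKey and endKey, each with its
    // columns starting at startColumn. "HourlyTimeline" is a hypothetical column family name.
    public List<Row<String, String, String>> getRangeSlice(String startKey, String endKey,
                                                           String startColumn, int rowLimit, boolean reversed) {
        RangeSlicesQuery<String, String, String> query = HFactory.createRangeSlicesQuery(
                keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
        query.setColumnFamily("HourlyTimeline")
             .setKeys(startKey, endKey)
             .setRange(startColumn, "", reversed, 1000) // up to 1000 columns per row (arbitrary cap for this sketch)
             .setRowCount(rowLimit);
        QueryResult<OrderedRows<String, String, String>> result = query.execute();
        return result.get().getList();
    }
}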
Use of me.prettyprint.hector.api.beans.HColumn in project logprocessing by cloudian.
In class CDRDataAccess, method getChartDataByHour.
public List<ChartSeries> getChartDataByHour(long minTimestamp, long maxTimestamp, String market, String messageType, int limit) {
    List<ChartSeries> chartData = new ArrayList<ChartSeries>();
    ChartSeries series = new ChartSeries("CDR count for all MSISDNs for " + market + ", with type = " + messageType);
    List<ChartValueByTime> chartVals = new ArrayList<ChartValueByTime>();
    try {
        // The maxTimestamp is 'inclusive', so decrease it by 1 ms.
        maxTimestamp--;
        // Truncate minTime and maxTime to the nearest hour.
        long maxHour = (maxTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
        long minHour = (minTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
        String strMinHour = m_sdfHourly.format(new Date(minHour));
        String strMaxHour = m_sdfHourly.format(new Date(maxHour));
        int currentCount = 0;
        // Get a chunk of rows - one row per hour, one chart value per row.
        // TODO: Treat the last row in the list as a special case, since it will probably not
        // contain all the entries for the hour.
        // TODO: Query the DB in a loop, getting all records between min and max hour.
        List<Row<String, String, String>> rows = m_daoHourlyTimeline.getRangeSlice(strMinHour, strMaxHour, "", LIMIT, false);
        for (Row<String, String, String> row : rows) {
            // Hourly timestamp (the row key).
            String strTime = row.getKey();
            long timeStamp = Long.parseLong(strTime);
            // One row contains all the CDRs for a particular hour. Count the ones that match.
            List<HColumn<String, String>> cols = row.getColumnSlice().getColumns();
            for (HColumn<String, String> result : cols) {
                String sEntryID = result.getValue();
                String sType = m_daoCDREntry.get(sEntryID, COL_TYPE, StringSerializer.get());
                String sMarket = m_daoCDREntry.get(sEntryID, COL_MARKET, StringSerializer.get());
                // Filter by market and message type.
                if (sType == null || sMarket == null || (sType.compareTo(messageType) != 0 && !messageType.equals(MESSAGE_TYPE_ALL)) || sMarket.compareTo(market) != 0) {
                    // Skip this entry.
                    continue;
                }
                currentCount++;
            }
            if (currentCount > 0) {
                ChartValueByTime chartval = new ChartValueByTime(currentCount, timeStamp);
                chartVals.add(chartval);
            }
            currentCount = 0;
        }
    } catch (Exception e) {
        logger.error("Failed to build hourly chart data: " + e.getMessage(), e);
    }
    series.setData(chartVals);
    chartData.add(series);
    return chartData;
}
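Each m_daoCDREntry.get(rowKey, columnName, serializer) call in the methods above reads a single column of one CDR row. A sketch of such a helper, under the assumption that it wraps Hector's ColumnQuery; the class name and the "CDREntry" column family name are hypothetical.

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.Serializer;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.QueryResult;

public class CdrEntryDaoSketch {

    private final Keyspace keyspace;

    public CdrEntryDaoSketch(Keyspace keyspace) {
        this.keyspace = keyspace;
    }

    // Read a single column value from the "CDREntry" column family, or null if the
    // row or column does not exist.
    public String get(String rowKey, String columnName, Serializer<String> valueSerializer) {
        ColumnQuery<String, String, String> query = HFactory.createColumnQuery(
                keyspace, StringSerializer.get(), StringSerializer.get(), valueSerializer);
        query.setColumnFamily("CDREntry").setKey(rowKey).setName(columnName);
        QueryResult<HColumn<String, String>> result = query.execute();
        HColumn<String, String> column = result.get();
        return column == null ? null : column.getValue();
    }
}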
Use of me.prettyprint.hector.api.beans.HColumn in project logprocessing by cloudian.
In class CDRDataAccess, method getChartDataByMSISDN.
public List<ChartSeries> getChartDataByMSISDN(String msisdn, long minTimestamp, long maxTimestamp, String market, String messageType, int limit) {
    List<ChartSeries> chartData = new ArrayList<ChartSeries>();
    ChartSeries series = new ChartSeries("CDR count for : " + msisdn);
    List<ChartValueByTime> chartVals = new ArrayList<ChartValueByTime>();
    try {
        // The maxTimestamp is 'inclusive', so decrease it by 1 ms.
        maxTimestamp--;
        String strMinStamp = Long.toString(minTimestamp);
        String strMaxStamp = Long.toString(maxTimestamp);
        List<HColumn<String, String>> results = m_daoMSISDNTimeline.getSliceUsingTimestamp(msisdn, strMinStamp, strMaxStamp, limit, false);
        // Start at the hour containing minTimestamp.
        long currentHour = (minTimestamp / MS_PER_HOUR) * MS_PER_HOUR;
        int currentCount = 0;
        // Bucket the entries by hour.
        for (HColumn<String, String> result : results) {
            // Column names are of the form "<timestamp>_<uuid>".
            String entryTimestampStr = result.getName();
            String[] colNameParts = entryTimestampStr.split("_");
            if (colNameParts == null || colNameParts.length < 2) {
                logger.error("Invalid key from MSISDNTimeline table: " + entryTimestampStr);
                break;
            }
            long entryTimestamp = Long.parseLong(colNameParts[0]);
            String sEntryID = result.getValue();
            String sType = m_daoCDREntry.get(sEntryID, COL_TYPE, StringSerializer.get());
            String sMarket = m_daoCDREntry.get(sEntryID, COL_MARKET, StringSerializer.get());
            // Filter by market and message type.
            if (sType == null || sMarket == null || (sType.compareTo(messageType) != 0 && !messageType.equals(MESSAGE_TYPE_ALL)) || sMarket.compareTo(market) != 0) {
                // Skip this entry.
                continue;
            }
            if (entryTimestamp < (currentHour + MS_PER_HOUR)) {
                // Keep counting CDRs for the current hour.
                currentCount++;
            } else {
                // Done with this hour: record its count (if any), then advance to the hour that
                // contains this entry and count the entry towards it.
                if (currentCount > 0) {
                    chartVals.add(new ChartValueByTime(currentCount, currentHour));
                }
                while (entryTimestamp >= (currentHour + MS_PER_HOUR)) {
                    currentHour += MS_PER_HOUR;
                }
                currentCount = 1;
            }
        }
        // Take care of the final hour.
        if (currentCount > 0) {
            ChartValueByTime chartval = new ChartValueByTime(currentCount, currentHour);
            chartVals.add(chartval);
        }
    } catch (Exception e) {
        logger.error("Failed to build chart data for MSISDN " + msisdn + ": " + e.getMessage(), e);
    }
    series.setData(chartVals);
    chartData.add(series);
    return chartData;
}
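The m_daoMSISDNTimeline.getSliceUsingTimestamp(...) helper is also not shown in this excerpt. A plausible shape for it using Hector's SliceQuery over a single MSISDN row; the class name and the "MSISDNTimeline" column family name are assumptions, and it relies on the column names beginning with a millisecond timestamp, as the parsing code above implies.

import java.util.List;

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.SliceQuery;

public class MsisdnTimelineDaoSketch {

    private final Keyspace keyspace;

    public MsisdnTimelineDaoSketch(Keyspace keyspace) {
        this.keyspace = keyspace;
    }

    // Return up to 'limit' columns of one MSISDN row whose names fall between the two timestamp
    // strings (assumes column names sort by their leading timestamp).
    public List<HColumn<String, String>> getSliceUsingTimestamp(String msisdn, String minStamp,
                                                                String maxStamp, int limit, boolean reversed) {
        SliceQuery<String, String, String> query = HFactory.createSliceQuery(
                keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get());
        query.setColumnFamily("MSISDNTimeline").setKey(msisdn).setRange(minStamp, maxStamp, reversed, limit);
        QueryResult<ColumnSlice<String, String>> result = query.execute();
        return result.get().getColumns();
    }
}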
Use of me.prettyprint.hector.api.beans.HColumn in project logprocessing by cloudian.
In class CDRDataAccess, method deleteBeforeTS.
// Delete all entries before (and including) a particular hour timestamp.
// For the hourly timeline the whole row is removed, along with the CDR entries it points to;
// for the MSISDN timeline only the columns older than the timestamp are removed.
public void deleteBeforeTS(long hourStamp) {
    String sLastHour = m_sdfHourly.format(new Date(hourStamp));
    String strLastHourCompare = String.format("%013d", hourStamp);
    try {
        boolean bMoreRows = true;
        String begRangeKey = "";
        // Handle the HourlyTimeline and CDREntry CFs first.
        while (bMoreRows) {
            List<Row<String, String, String>> rows = m_daoHourlyTimeline.getRangeSlice(begRangeKey, sLastHour, "", LIMIT, false);
            if (rows.size() > 0) {
                Row<String, String, String> lastRow = rows.get(rows.size() - 1);
                // Start key for the next range slice.
                begRangeKey = lastRow.getKey();
                for (Row<String, String, String> row : rows) {
                    List<HColumn<String, String>> cols = row.getColumnSlice().getColumns();
                    for (HColumn<String, String> col : cols) {
                        String entryId = col.getValue();
                        // Delete the individual CDR entry.
                        m_daoCDREntry.delete(entryId, null, StringSerializer.get());
                    }
                    String hourKey = row.getKey();
                    // Delete the entire hourly row.
                    m_daoHourlyTimeline.delete(hourKey, null, StringSerializer.get());
                }
                if (rows.size() == 1) {
                    // This is the last row in the slice - its data was already deleted.
                    bMoreRows = false;
                }
            } else {
                // Done operating on rows for HourlyTimeline.
                bMoreRows = false;
            }
        }
        // Now take care of MSISDNTimeline.
        bMoreRows = true;
        begRangeKey = "";
        while (bMoreRows) {
            List<Row<String, String, String>> rows = m_daoMSISDNTimeline.getRangeSlice(begRangeKey, "", "", LIMIT, false);
            if (rows.size() > 0) {
                Row<String, String, String> lastRow = rows.get(rows.size() - 1);
                // Start key for the next range slice.
                begRangeKey = lastRow.getKey();
                for (Row<String, String, String> row : rows) {
                    List<HColumn<String, String>> cols = row.getColumnSlice().getColumns();
                    for (HColumn<String, String> col : cols) {
                        String colStamp = col.getName();
                        // Lexicographic comparison - ignores the UUID portion of the column name.
                        if (colStamp.compareTo(strLastHourCompare) < 0) {
                            // Delete the expired MSISDNTimeline column.
                            m_daoMSISDNTimeline.delete(row.getKey(), colStamp, StringSerializer.get());
                            // TODO: Should we optimize by queuing all deletions for the row and then executing once?
                        }
                    }
                }
                if (rows.size() == 1) {
                    // This is the last row in the slice - its data was already deleted.
                    bMoreRows = false;
                }
            } else {
                // Done operating on rows for MSISDNTimeline.
                bMoreRows = false;
            }
        }
    } catch (Exception e) {
        logger.error("Failed to delete entries before " + hourStamp + ": " + e.getMessage(), e);
    }
}
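The TODO above asks whether the per-column deletes could be batched. With Hector this is typically done by queuing addDeletion calls on a single Mutator and flushing once per chunk; a minimal sketch follows, with the class name and the "MSISDNTimeline" column family name assumed rather than taken from the project.

import java.util.List;

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;

public class TimelineCleanupSketch {

    // Queue one deletion per expired column and send them to Cassandra in a single batch.
    public static void deleteExpiredColumns(Keyspace keyspace, List<Row<String, String, String>> rows,
                                            String lastHourCompare) {
        Mutator<String> mutator = HFactory.createMutator(keyspace, StringSerializer.get());
        for (Row<String, String, String> row : rows) {
            for (HColumn<String, String> col : row.getColumnSlice().getColumns()) {
                if (col.getName().compareTo(lastHourCompare) < 0) {
                    mutator.addDeletion(row.getKey(), "MSISDNTimeline", col.getName(), StringSerializer.get());
                }
            }
        }
        // One round trip for the whole batch instead of one per column.
        mutator.execute();
    }
}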
Use of me.prettyprint.hector.api.beans.HColumn in project sling by apache.
In class AccessControlUtil, method updateACL.
private void updateACL(String rid, String policy, StringSerializer se, QueryResult<CqlRows<String, String, String>> results) {
    String oldACL = "";
    for (Row<String, String, String> row : results.get().getList()) {
        for (HColumn<String, String> column : row.getColumnSlice().getColumns()) {
            if ("policy".equalsIgnoreCase(column.getName()) && column.getValue() != null) {
                oldACL = column.getValue();
            }
        }
    }
    // Append the new policy to the existing ACL, or start a new ACL if none was stored yet.
    if (oldACL.isEmpty()) {
        oldACL = policy;
    } else {
        oldACL = oldACL + ";" + policy;
    }
    addACL(rid, oldACL, new StringSerializer());
}
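The QueryResult<CqlRows<...>> handed to updateACL is produced elsewhere in the class. A sketch of how such a result might be obtained with Hector's CqlQuery; the "ac" column family and "rid" key column named in the CQL string are hypothetical, and a real implementation would need to escape or validate the rid value before embedding it in the query.

import me.prettyprint.cassandra.model.CqlQuery;
import me.prettyprint.cassandra.model.CqlRows;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.query.QueryResult;

public class AclQuerySketch {

    // Fetch the row holding the current ACL policy for a resource id.
    public static QueryResult<CqlRows<String, String, String>> queryAcl(Keyspace keyspace, String rid) {
        StringSerializer se = StringSerializer.get();
        CqlQuery<String, String, String> query = new CqlQuery<String, String, String>(keyspace, se, se, se);
        // "ac" and "rid" are assumed names for the access-control column family and its key column.
        query.setQuery("SELECT * FROM ac WHERE rid = '" + rid + "'");
        return query.execute();
    }
}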