Use of com.amazonaws.services.s3.model.S3ObjectSummary in project ice by Netflix:
class BasicManagers, method doWork.
/**
 * Scans the work S3 bucket for tag-group database files and registers
 * tag-group/cost/usage managers for any products not seen before.
 *
 * Works on copies of the current manager maps and swaps them in atomically
 * at the end only when new products were actually discovered, so concurrent
 * readers never observe a half-populated map.
 */
private void doWork() {
    logger.info("trying to find new tag group and data managers...");
    Set<Product> products = Sets.newHashSet(this.products);
    Map<Product, BasicTagGroupManager> tagGroupManagers = Maps.newHashMap(this.tagGroupManagers);
    TreeMap<Key, BasicDataManager> costManagers = Maps.newTreeMap(this.costManagers);
    TreeMap<Key, BasicDataManager> usageManagers = Maps.newTreeMap(this.usageManagers);
    Set<Product> newProducts = Sets.newHashSet();
    AmazonS3Client s3Client = AwsUtils.getAmazonS3Client();
    // Hoist the loop-invariant listing prefix instead of rebuilding the
    // concatenation for every object summary.
    final String dbPrefix = config.workS3BucketPrefix + TagGroupWriter.DB_PREFIX;
    for (S3ObjectSummary s3ObjectSummary : s3Client.listObjects(config.workS3BucketName, dbPrefix).getObjectSummaries()) {
        String key = s3ObjectSummary.getKey();
        Product product;
        if (key.endsWith("_all")) {
            // The "_all" aggregate database is keyed by a null product.
            product = null;
        } else {
            // Product name is whatever follows the db prefix in the object key.
            String name = key.substring(dbPrefix.length());
            product = config.productService.getProductByName(name);
        }
        // Set.add returns true only for a product we have not seen before,
        // replacing the previous contains()-then-add() double lookup.
        if (products.add(product)) {
            newProducts.add(product);
        }
    }
    for (Product product : newProducts) {
        tagGroupManagers.put(product, new BasicTagGroupManager(product));
        for (ConsolidateType consolidateType : ConsolidateType.values()) {
            Key key = new Key(product, consolidateType);
            costManagers.put(key, new BasicDataManager(product, consolidateType, true));
            usageManagers.put(key, new BasicDataManager(product, consolidateType, false));
        }
    }
    // Publish the updated maps only if something actually changed.
    if (!newProducts.isEmpty()) {
        this.costManagers = costManagers;
        this.usageManagers = usageManagers;
        this.tagGroupManagers = tagGroupManagers;
        this.products = products;
    }
}
Use of com.amazonaws.services.s3.model.S3ObjectSummary in project ice by Netflix:
class BillingFileProcessor, method poll.
/**
 * Polls every configured billing S3 bucket for monthly billing files (and
 * optional monitoring files), downloads any that changed since the last
 * successful run, processes them, and archives the results.
 *
 * Per month it: (1) picks at most one billing file per bucket, preferring
 * the tag variant that matches whether a resource service is configured;
 * (2) skips the month entirely when no file is newer than the recorded
 * last-process time; (3) otherwise downloads and processes each file, then
 * any monitoring files, computes reservation costs, and archives.
 *
 * @throws Exception on download or processing failure not handled locally
 */
@Override
protected void poll() throws Exception {
    // Month start -> billing files from all buckets for that month, in month order.
    TreeMap<DateTime, List<BillingFile>> filesToProcess = Maps.newTreeMap();
    Map<DateTime, List<BillingFile>> monitorFilesToProcess = Maps.newTreeMap();
    // List the billing-report objects in every configured billing bucket.
    // The parallel config arrays may be shorter than billingS3BucketNames,
    // hence the length guards with "" defaults.
    for (int i = 0; i < config.billingS3BucketNames.length; i++) {
        String billingS3BucketName = config.billingS3BucketNames[i];
        String billingS3BucketPrefix = config.billingS3BucketPrefixes.length > i ? config.billingS3BucketPrefixes[i] : "";
        String accountId = config.billingAccountIds.length > i ? config.billingAccountIds[i] : "";
        String billingAccessRoleName = config.billingAccessRoleNames.length > i ? config.billingAccessRoleNames[i] : "";
        String billingAccessExternalId = config.billingAccessExternalIds.length > i ? config.billingAccessExternalIds[i] : "";
        logger.info("trying to list objects in billing bucket " + billingS3BucketName + " using assume role, and external id " + billingAccessRoleName + " " + billingAccessExternalId);
        List<S3ObjectSummary> objectSummaries = AwsUtils.listAllObjects(billingS3BucketName, billingS3BucketPrefix, accountId, billingAccessRoleName, billingAccessExternalId);
        logger.info("found " + objectSummaries.size() + " in billing bucket " + billingS3BucketName);
        TreeMap<DateTime, S3ObjectSummary> filesToProcessInOneBucket = Maps.newTreeMap();
        Map<DateTime, S3ObjectSummary> monitorFilesToProcessInOneBucket = Maps.newTreeMap();
        // Select at most one billing file per month from this bucket.
        for (S3ObjectSummary objectSummary : objectSummaries) {
            String fileKey = objectSummary.getKey();
            // Try the with-tags file-name pattern first; fall back to the plain one.
            DateTime dataTime = AwsUtils.getDateTimeFromFileNameWithTags(fileKey);
            boolean withTags = true;
            if (dataTime == null) {
                dataTime = AwsUtils.getDateTimeFromFileName(fileKey);
                withTags = false;
            }
            if (dataTime != null && !dataTime.isBefore(config.startDate)) {
                // && binds tighter than ||: take the file when the month is unseen,
                // or when its tag variant matches whether a resource service is
                // configured (tagged file wanted iff resourceService != null).
                if (!filesToProcessInOneBucket.containsKey(dataTime) || withTags && config.resourceService != null || !withTags && config.resourceService == null)
                    filesToProcessInOneBucket.put(dataTime, objectSummary);
                else
                    logger.info("ignoring file " + objectSummary.getKey());
            } else {
                // Unparseable name or month before the configured start date.
                logger.info("ignoring file " + objectSummary.getKey());
            }
        }
        // Separately collect monitoring files for each month.
        for (S3ObjectSummary objectSummary : objectSummaries) {
            String fileKey = objectSummary.getKey();
            DateTime dataTime = AwsUtils.getDateTimeFromFileNameWithMonitoring(fileKey);
            if (dataTime != null && !dataTime.isBefore(config.startDate)) {
                monitorFilesToProcessInOneBucket.put(dataTime, objectSummary);
            }
        }
        // Merge this bucket's per-month selections into the cross-bucket maps.
        for (DateTime key : filesToProcessInOneBucket.keySet()) {
            List<BillingFile> list = filesToProcess.get(key);
            if (list == null) {
                list = Lists.newArrayList();
                filesToProcess.put(key, list);
            }
            list.add(new BillingFile(filesToProcessInOneBucket.get(key), accountId, billingAccessRoleName, billingAccessExternalId, billingS3BucketPrefix));
        }
        for (DateTime key : monitorFilesToProcessInOneBucket.keySet()) {
            List<BillingFile> list = monitorFilesToProcess.get(key);
            if (list == null) {
                list = Lists.newArrayList();
                monitorFilesToProcess.put(key, list);
            }
            list.add(new BillingFile(monitorFilesToProcessInOneBucket.get(key), accountId, billingAccessRoleName, billingAccessExternalId, billingS3BucketPrefix));
        }
    }
    // Process each month in chronological order (TreeMap keySet iteration).
    for (DateTime dataTime : filesToProcess.keySet()) {
        startMilli = endMilli = dataTime.getMillis();
        init();
        boolean hasNewFiles = false;
        boolean hasTags = false;
        long lastProcessed = lastProcessTime(AwsUtils.monthDateFormat.print(dataTime));
        // First pass: decide whether any file for this month is newer than
        // the last recorded processing time.
        for (BillingFile billingFile : filesToProcess.get(dataTime)) {
            S3ObjectSummary objectSummary = billingFile.s3ObjectSummary;
            if (objectSummary.getLastModified().getTime() < lastProcessed) {
                logger.info("data has been processed. ignoring " + objectSummary.getKey() + "...");
                continue;
            }
            hasNewFiles = true;
        }
        if (!hasNewFiles) {
            logger.info("data has been processed. ignoring all files at " + AwsUtils.monthDateFormat.print(dataTime));
            continue;
        }
        // Record the processing timestamp before work starts so a concurrent
        // upload during processing is picked up on the next poll.
        long processTime = new DateTime(DateTimeZone.UTC).getMillis();
        // Second pass: download (if changed) and process every billing file.
        for (BillingFile billingFile : filesToProcess.get(dataTime)) {
            S3ObjectSummary objectSummary = billingFile.s3ObjectSummary;
            String fileKey = objectSummary.getKey();
            File file = new File(config.localDir, fileKey.substring(billingFile.prefix.length()));
            logger.info("trying to download " + fileKey + "...");
            boolean downloaded = AwsUtils.downloadFileIfChangedSince(objectSummary.getBucketName(), billingFile.prefix, file, lastProcessed, billingFile.accountId, billingFile.accessRoleName, billingFile.externalId);
            if (downloaded)
                logger.info("downloaded " + fileKey);
            else {
                logger.info("file already downloaded " + fileKey + "...");
            }
            logger.info("processing " + fileKey + "...");
            boolean withTags = fileKey.contains("with-resources-and-tags");
            hasTags = hasTags || withTags;
            processingMonitor = false;
            processBillingZipFile(file, withTags);
            logger.info("done processing " + fileKey);
        }
        // Then process any monitoring files recorded for this month.
        if (monitorFilesToProcess.get(dataTime) != null) {
            for (BillingFile monitorBillingFile : monitorFilesToProcess.get(dataTime)) {
                S3ObjectSummary monitorObjectSummary = monitorBillingFile.s3ObjectSummary;
                if (monitorObjectSummary != null) {
                    String monitorFileKey = monitorObjectSummary.getKey();
                    logger.info("processing " + monitorFileKey + "...");
                    File monitorFile = new File(config.localDir, monitorFileKey.substring(monitorFileKey.lastIndexOf("/") + 1));
                    logger.info("trying to download " + monitorFileKey + "...");
                    boolean downloaded = AwsUtils.downloadFileIfChangedSince(monitorObjectSummary.getBucketName(), monitorBillingFile.prefix, monitorFile, lastProcessed, monitorBillingFile.accountId, monitorBillingFile.accessRoleName, monitorBillingFile.externalId);
                    if (downloaded)
                        logger.info("downloaded " + monitorFile);
                    else
                        logger.warn(monitorFile + "already downloaded...");
                    FileInputStream in = new FileInputStream(monitorFile);
                    try {
                        processingMonitor = true;
                        processBillingFile(monitorFile.getName(), in, true);
                    } catch (Exception e) {
                        logger.error("Error processing " + monitorFile, e);
                    } finally {
                        // Always release the stream, even on processing failure.
                        in.close();
                    }
                }
            }
        }
        // Only the most recent month gets truncated to the hours elapsed so far.
        if (dataTime.equals(filesToProcess.lastKey())) {
            int hours = (int) ((endMilli - startMilli) / 3600000L);
            logger.info("cut hours to " + hours);
            cutData(hours);
        }
        // Now get reservation capacity to calculate upfront and un-used cost.
        for (Ec2InstanceReservationPrice.ReservationUtilization utilization : Ec2InstanceReservationPrice.ReservationUtilization.values()) processReservations(utilization);
        if (hasTags && config.resourceService != null)
            config.resourceService.commit();
        logger.info("archiving results for " + dataTime + "...");
        archive();
        logger.info("done archiving " + dataTime);
        updateProcessTime(AwsUtils.monthDateFormat.print(dataTime), processTime);
        // Ondemand-cost alerting only applies to the latest month processed.
        if (dataTime.equals(filesToProcess.lastKey())) {
            sendOndemandCostAlert();
        }
    }
    logger.info("AWS usage processed.");
}
Use of com.amazonaws.services.s3.model.S3ObjectSummary in project exhibitor by soabase:
class S3PseudoLock, method getFileNames.
/**
 * Lists the S3 object keys under the given lock prefix in the lock bucket.
 *
 * @param lockPrefix key prefix identifying this lock's files
 * @return keys of all objects matching the prefix (first listing page)
 * @throws Exception if the S3 listing fails
 */
@Override
protected List<String> getFileNames(String lockPrefix) throws Exception {
    ListObjectsRequest request = new ListObjectsRequest();
    request.setBucketName(bucket);
    request.setPrefix(lockPrefix);
    // NOTE(review): a single listObjects call returns at most one page of
    // results; if more lock files than one page can ever exist, pagination
    // via isTruncated()/listNextBatchOfObjects would be needed — confirm.
    ObjectListing objectListing = client.listObjects(request);
    // Materialize the result: Lists.transform alone returns a lazy view that
    // re-applies the function on every access and keeps the whole
    // ObjectListing reachable for the view's lifetime.
    return Lists.newArrayList(Lists.transform(objectListing.getObjectSummaries(), new Function<S3ObjectSummary, String>() {
        @Override
        public String apply(S3ObjectSummary summary) {
            return summary.getKey();
        }
    }));
}
Use of com.amazonaws.services.s3.model.S3ObjectSummary in project exhibitor by soabase:
class TestS3BackupProviderBase, method testGetAvailableBackupKeys.
/**
 * Verifies that getAvailableBackups parses backup names out of the S3 object
 * keys produced by a stubbed listing, preserving their order.
 */
@Test
public void testGetAvailableBackupKeys() throws Exception {
    // Stub listing whose summaries encode three backups named one/two/three.
    ObjectListing listing = new ObjectListing() {
        @Override
        public List<S3ObjectSummary> getObjectSummaries() {
            List<S3ObjectSummary> summaries = Lists.newArrayList();
            for (String name : new String[] { "one", "two", "three" }) {
                S3ObjectSummary summary = new S3ObjectSummary();
                summary.setKey("exhibitor-backup" + S3BackupProvider.SEPARATOR + name + S3BackupProvider.SEPARATOR + "1234");
                summaries.add(summary);
            }
            return summaries;
        }
    };
    MockS3Client s3Client = new MockS3Client(null, listing);
    S3BackupProvider provider = new S3BackupProvider(new MockS3ClientFactory(s3Client), new PropertyBasedS3Credential(new Properties()), new PropertyBasedS3ClientConfig(new Properties()), null);
    List<BackupMetaData> backups = provider.getAvailableBackups(null, Maps.<String, String>newHashMap());
    // Project each BackupMetaData down to its name for comparison.
    List<String> backupNames = Lists.transform(backups, new Function<BackupMetaData, String>() {
        @Override
        public String apply(BackupMetaData metaData) {
            return metaData.getName();
        }
    });
    Assert.assertEquals(backupNames, Arrays.asList("one", "two", "three"));
}
Use of com.amazonaws.services.s3.model.S3ObjectSummary in project h2o-2 by h2oai:
class ImportFiles2, method serveS3.
/**
 * Imports every object found under an "s3://bucket/prefix" path, recording
 * which keys loaded successfully and which failed.
 *
 * Side effects: populates the keys/files/fails result arrays and sets
 * this.prefix to the common prefix of the successful keys.
 *
 * @throws IllegalArgumentException if the bucket does not exist
 */
protected void serveS3() {
    Futures fs = new Futures();
    assert path.startsWith("s3://");
    // Strip the "s3://" scheme, then split into bucket and optional key prefix.
    path = path.substring(5);
    int bend = path.indexOf('/');
    if (bend == -1)
        bend = path.length();
    String bucket = path.substring(0, bend);
    String prefix = bend < path.length() ? path.substring(bend + 1) : "";
    AmazonS3 s3 = PersistS3.getClient();
    if (!s3.doesBucketExist(bucket))
        throw new IllegalArgumentException("S3 Bucket " + bucket + " not found!");
    ArrayList<String> succ = new ArrayList<String>();
    ArrayList<String> fail = new ArrayList<String>();
    // Page through the listing: listObjects returns at most one batch of keys,
    // so keep fetching while the listing reports truncation.
    ObjectListing currentList = s3.listObjects(bucket, prefix);
    while (true) {
        for (S3ObjectSummary obj : currentList.getObjectSummaries()) {
            try {
                succ.add(S3FileVec.make(obj, fs).toString());
            } catch (Throwable e) {
                // Best-effort import: record the failure and keep going.
                fail.add(obj.getKey());
                Log.err("Failed to loadfile from S3: path = " + obj.getKey() + ", error = " + e.getClass().getName() + ", msg = " + e.getMessage());
            }
        }
        if (currentList.isTruncated())
            currentList = s3.listNextBatchOfObjects(currentList);
        else
            break;
    }
    // Idiomatic toArray: a zero-length array lets the JVM allocate the exact size.
    keys = succ.toArray(new String[0]);
    files = keys;
    fails = fail.toArray(new String[0]);
    this.prefix = getCommonPrefix(keys);
}
Aggregations