Use of org.apache.jackrabbit.core.data.DataStoreException in project jackrabbit by apache.
The class S3Backend, method getLength.
@Override
public long getLength(DataIdentifier identifier) throws DataStoreException {
    long start = System.currentTimeMillis();
    String key = getKeyName(identifier);
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        ObjectMetadata object = s3service.getObjectMetadata(bucket, key);
        long length = object.getContentLength();
        LOG.debug("Identifier [{}]'s length = [{}] took [{}]ms.", new Object[] { identifier, length, (System.currentTimeMillis() - start) });
        return length;
    } catch (AmazonServiceException e) {
        throw new DataStoreException("Could not get length of dataIdentifier " + identifier, e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
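A minimal caller sketch, assuming an already-initialized S3Backend instance named backend; it reads the record length and handles the DataStoreException that getLength throws when the S3 metadata lookup fails. The identifier value is hypothetical.

// Hypothetical usage; "backend" and the identifier value are illustrative only.
DataIdentifier id = new DataIdentifier("0123456789abcdef0123456789abcdef01234567");
try {
    long size = backend.getLength(id);
    LOG.info("record [{}] is [{}] bytes", id, size);
} catch (DataStoreException e) {
    // Raised when the object metadata cannot be read from the bucket.
    LOG.error("could not determine length of record [{}]", id, e);
}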
Use of org.apache.jackrabbit.core.data.DataStoreException in project jackrabbit-oak by apache.
The class SafeDataStoreBlobStore, method getReference.
@Override
public String getReference(@Nonnull String encodedBlobId) {
    checkNotNull(encodedBlobId);
    String blobId = extractBlobId(encodedBlobId);
    // References are not created for in-memory records
    if (InMemoryDataRecord.isInstance(blobId)) {
        return null;
    }
    DataRecord record;
    try {
        record = delegate.getRecordIfStored(new DataIdentifier(blobId));
        if (record != null) {
            return record.getReference();
        } else {
            log.debug("No blob found for id [{}]", blobId);
        }
    } catch (DataStoreException e) {
        log.warn("Unable to access the blobId for [{}]", blobId, e);
    }
    return null;
}
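A hedged usage sketch, assuming a constructed SafeDataStoreBlobStore named blobStore; getReference returns null both for in-memory records and for blobs the delegate does not hold, so callers need a null check.

// Hypothetical caller; "blobStore" and "encodedBlobId" come from the surrounding context.
String reference = blobStore.getReference(encodedBlobId);
if (reference == null) {
    // Either an in-memory record or a blob that is not stored in the delegate data store.
    log.info("no secure reference available for [{}]", encodedBlobId);
} else {
    log.info("reference for [{}] is [{}]", encodedBlobId, reference);
}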
Use of org.apache.jackrabbit.core.data.DataStoreException in project jackrabbit by apache.
The class S3Backend, method init.
public void init(CachingDataStore store, String homeDir, Properties prop) throws DataStoreException {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        startTime = new Date();
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        LOG.debug("init");
        setDataStore(store);
        s3ReqDecorator = new S3RequestDecorator(prop);
        s3service = Utils.openService(prop);
        if (bucket == null || "".equals(bucket.trim())) {
            bucket = prop.getProperty(S3Constants.S3_BUCKET);
        }
        String region = prop.getProperty(S3Constants.S3_REGION);
        Region s3Region = null;
        if (StringUtils.isNullOrEmpty(region)) {
            com.amazonaws.regions.Region ec2Region = Regions.getCurrentRegion();
            if (ec2Region != null) {
                s3Region = Region.fromValue(ec2Region.getName());
            } else {
                throw new AmazonClientException("parameter [" + S3Constants.S3_REGION + "] not configured and cannot be derived from environment");
            }
        } else {
            if (Utils.DEFAULT_AWS_BUCKET_REGION.equals(region)) {
                s3Region = Region.US_Standard;
            } else if (Region.EU_Ireland.toString().equals(region)) {
                s3Region = Region.EU_Ireland;
            } else {
                s3Region = Region.fromValue(region);
            }
        }
        if (!s3service.doesBucketExist(bucket)) {
            s3service.createBucket(bucket, s3Region);
            LOG.info("Created bucket [{}] in [{}] ", bucket, region);
        } else {
            LOG.info("Using bucket [{}] in [{}] ", bucket, region);
        }
        int writeThreads = 10;
        String writeThreadsStr = prop.getProperty(S3Constants.S3_WRITE_THREADS);
        if (writeThreadsStr != null) {
            writeThreads = Integer.parseInt(writeThreadsStr);
        }
        LOG.info("Using thread pool of [{}] threads in S3 transfer manager.", writeThreads);
        tmx = new TransferManager(s3service, (ThreadPoolExecutor) Executors.newFixedThreadPool(writeThreads, new NamedThreadFactory("s3-transfer-manager-worker")));
        int asyncWritePoolSize = 10;
        String maxConnsStr = prop.getProperty(S3Constants.S3_MAX_CONNS);
        if (maxConnsStr != null) {
            asyncWritePoolSize = Integer.parseInt(maxConnsStr) - writeThreads;
        }
        setAsyncWritePoolSize(asyncWritePoolSize);
        String renameKeyProp = prop.getProperty(S3Constants.S3_RENAME_KEYS);
        boolean renameKeyBool = (renameKeyProp == null || "".equals(renameKeyProp)) ? false : Boolean.parseBoolean(renameKeyProp);
        LOG.info("Rename keys [{}]", renameKeyBool);
        if (renameKeyBool) {
            renameKeys();
        }
        LOG.debug("S3 Backend initialized in [{}] ms", (System.currentTimeMillis() - startTime.getTime()));
    } catch (Exception e) {
        LOG.debug(" error ", e);
        throw new DataStoreException("Could not initialize S3 from " + prop, e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
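A rough configuration sketch, assuming a CachingDataStore named store and AWS credentials supplied through the same property set consumed by Utils.openService; only keys that init reads above are shown, and the values are illustrative.

// Hypothetical configuration; bucket name, region and pool sizes are placeholders.
Properties props = new Properties();
props.setProperty(S3Constants.S3_BUCKET, "my-datastore-bucket");
props.setProperty(S3Constants.S3_REGION, "eu-west-1");
props.setProperty(S3Constants.S3_WRITE_THREADS, "10");
// asyncWritePoolSize is derived above as maxConns - writeThreads
props.setProperty(S3Constants.S3_MAX_CONNS, "20");
props.setProperty(S3Constants.S3_RENAME_KEYS, "false");

S3Backend backend = new S3Backend();
// Throws DataStoreException when the bucket, region or credentials are misconfigured.
backend.init(store, homeDir, props);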
Use of org.apache.jackrabbit.core.data.DataStoreException in project jackrabbit by apache.
The class S3Backend, method getAllIdentifiers.
@Override
public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
    long start = System.currentTimeMillis();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Set<DataIdentifier> ids = new HashSet<DataIdentifier>();
        ObjectListing prevObjectListing = s3service.listObjects(bucket);
        while (true) {
            for (S3ObjectSummary s3ObjSumm : prevObjectListing.getObjectSummaries()) {
                String id = getIdentifierName(s3ObjSumm.getKey());
                if (id != null) {
                    ids.add(new DataIdentifier(id));
                }
            }
            if (!prevObjectListing.isTruncated()) {
                break;
            }
            prevObjectListing = s3service.listNextBatchOfObjects(prevObjectListing);
        }
        LOG.debug("getAllIdentifiers returned size [{}] took [{}] ms.", ids.size(), (System.currentTimeMillis() - start));
        return ids.iterator();
    } catch (AmazonServiceException e) {
        throw new DataStoreException("Could not list objects", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
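A short sketch of draining the returned iterator, for example to count the records currently held in the bucket; the backend variable is assumed to be an initialized S3Backend.

// Hypothetical usage: enumerate every record identifier in the data store.
Iterator<DataIdentifier> it = backend.getAllIdentifiers();
int count = 0;
while (it.hasNext()) {
    DataIdentifier id = it.next();
    LOG.trace("found record [{}]", id);
    count++;
}
LOG.info("data store currently holds [{}] records", count);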
Use of org.apache.jackrabbit.core.data.DataStoreException in project jackrabbit by apache.
The class S3Backend, method exists.
/**
 * Check if the record identified by identifier exists in Amazon S3.
 */
@Override
public boolean exists(DataIdentifier identifier) throws DataStoreException {
    long start = System.currentTimeMillis();
    String key = getKeyName(identifier);
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        ObjectMetadata objectMetaData = s3service.getObjectMetadata(bucket, key);
        if (objectMetaData != null) {
            LOG.trace("exists [{}]: [true] took [{}] ms.", identifier, (System.currentTimeMillis() - start));
            return true;
        }
        return false;
    } catch (AmazonServiceException e) {
        if (e.getStatusCode() == 404 || e.getStatusCode() == 403) {
            LOG.debug("exists [{}]: [false] took [{}] ms.", identifier, (System.currentTimeMillis() - start));
            return false;
        }
        throw new DataStoreException("Error occurred in getObjectMetadata for key [" + identifier.toString() + "]", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
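A hedged sketch of the check-before-upload pattern exists enables; a 404 or 403 from S3 is treated as "not present" rather than as an error. The write call is assumed to be the standard Backend#write(DataIdentifier, File) upload path, and contentHash is a placeholder for an identifier computed by the caller.

// Hypothetical caller: only upload the file when the record is not already in S3.
DataIdentifier id = new DataIdentifier(contentHash);  // contentHash computed by the caller
if (!backend.exists(id)) {
    backend.write(id, file);  // assumed upload path; signature taken from the Backend interface
} else {
    LOG.debug("record [{}] already present, skipping upload", id);
}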