Use of org.commonjava.indy.model.core.StoreType in project indy by Commonjava.
The class NPMContentAccessHandler, method doHead.
@Override
public Response doHead(final String packageType, final String type, final String name, final String path, final Boolean cacheOnly, final String baseUri, final HttpServletRequest request, EventMetadata eventMetadata, final Consumer<Response.ResponseBuilder> builderModifier) {
if (!PackageTypes.contains(packageType)) {
Response.ResponseBuilder builder = Response.status(400);
if (builderModifier != null) {
builderModifier.accept(builder);
}
return builder.build();
}
final StoreType st = StoreType.get(type);
final StoreKey sk = new StoreKey(packageType, st, name);
eventMetadata = eventMetadata.set(ContentManager.ENTRY_POINT_STORE, sk);
Response response = null;
if (path == null || path.equals("")) {
logger.info("Getting listing at: {}", path);
response = RequestUtils.redirectContentListing(packageType, type, name, path, request, builderModifier);
} else {
try {
Transfer item = null;
logger.info("Checking existence of: {}:{} (cache only? {})", sk, path, cacheOnly);
boolean exists = false;
if (Boolean.TRUE.equals(cacheOnly)) {
logger.debug("Calling getTransfer()");
item = contentController.getTransfer(sk, path, TransferOperation.DOWNLOAD);
exists = item != null && item.exists();
logger.debug("Got transfer reference: {}", item);
} else {
logger.debug("Calling remote exists()");
exists = contentController.exists(sk, path);
logger.debug("Got remote exists: {}", exists);
}
if (exists) {
// For NPM, fetch the http-metadata stored for the mapped path directly, so its headers can be applied to the response below
HttpExchangeMetadata httpMetadata = contentController.getHttpMetadata(sk, path);
if (item == null) {
logger.info("Retrieving: {}:{} for existence test", sk, path);
item = contentController.get(sk, path, eventMetadata);
logger.debug("Got retrieved transfer reference: {}", item);
}
logger.debug("Building 200 response. Using HTTP metadata: {}", httpMetadata);
final Response.ResponseBuilder builder = Response.ok();
responseHelper.setInfoHeaders(builder, item, sk, path, true, getNPMContentType(path), httpMetadata);
if (builderModifier != null) {
builderModifier.accept(builder);
}
response = builder.build();
} else {
logger.debug("Building 404 (or error) response...");
if (StoreType.remote == st) {
final HttpExchangeMetadata metadata = contentController.getHttpMetadata(sk, path);
if (metadata != null) {
logger.debug("Using HTTP metadata to build negative response.");
response = responseHelper.formatResponseFromMetadata(metadata);
}
}
if (response == null) {
logger.debug("No HTTP metadata; building generic 404 response.");
Response.ResponseBuilder builder = Response.status(Response.Status.NOT_FOUND);
if (builderModifier != null) {
builderModifier.accept(builder);
}
response = builder.build();
}
}
} catch (final IndyWorkflowException e) {
logger.error(String.format("Failed to download artifact: %s from: %s. Reason: %s", path, name, e.getMessage()), e);
response = responseHelper.formatResponse(e, builderModifier);
}
}
return response;
}
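In practice doHead is not invoked by clients directly; a JAX-RS resource class binds the URL parameters and delegates to the handler. The following is a minimal, hypothetical delegating endpoint: the resource class name, path template, query-parameter name, baseUri derivation and the EventMetadata import location are assumptions for illustration, not taken from the Indy sources.

import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.HEAD;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;

import org.commonjava.maven.galley.event.EventMetadata; // assumed import location

@Path("/api/content/npm/{type}/{name}")
public class NPMContentAccessResourceSketch {

    @Inject
    private NPMContentAccessHandler handler; // the handler shown above

    @HEAD
    @Path("/{path: (.*)}")
    public Response head(@PathParam("type") final String type,
                         @PathParam("name") final String name,
                         @PathParam("path") final String path,
                         @QueryParam("cache-only") final Boolean cacheOnly,
                         @Context final HttpServletRequest request) {
        // Delegate to the handler; builderModifier is null since no extra response customization is needed here.
        return handler.doHead("npm", type, name, path, cacheOnly,
                              request.getRequestURL().toString(), // simplified baseUri for illustration
                              request, new EventMetadata(), null);
    }
}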
Use of org.commonjava.indy.model.core.StoreType in project indy by Commonjava.
The class NPMContentAccessHandler, method doGet.
@Override
public Response doGet(String packageType, String type, String name, String path, String baseUri, HttpServletRequest request, EventMetadata eventMetadata, Consumer<Response.ResponseBuilder> builderModifier) {
if (!PackageTypes.contains(packageType)) {
return responseWithBuilder(Response.status(400), builderModifier);
}
// hide sensitive npm user info exposed under the publish-related -/user paths
if (path != null && path.startsWith("-/user")) {
return responseWithBuilder(Response.status(404), builderModifier);
}
final StoreType st = StoreType.get(type);
final StoreKey sk = new StoreKey(packageType, st, name);
eventMetadata.set(ContentManager.ENTRY_POINT_STORE, sk);
setEntryPointBaseUri(request, baseUri, eventMetadata);
final AcceptInfo acceptInfo = jaxRsRequestHelper.findAccept(request, ApplicationContent.text_html);
final String standardAccept = ApplicationContent.getStandardAccept(acceptInfo.getBaseAccept());
Response response = null;
logger.info("GET path: '{}' (RAW: '{}')\nIn store: '{}'\nUser addMetadata header is: '{}'\nStandard addMetadata header for that is: '{}'", path, request.getPathInfo(), sk, acceptInfo.getRawAccept(), standardAccept);
if (path == null || path.equals("")) {
logger.info("Getting listing at: {}", path);
response = RequestUtils.redirectContentListing(packageType, type, name, path, request, builderModifier);
} else {
try {
// NOTE: We do NOT want to map this here. Instead, let's map it when we retrieve a Transfer instance as
// we access the file storage on this system...we do that via StoragePathCalculator, in pkg-npm/common.
// if ( eventMetadata.get( STORAGE_PATH ) != null && StoreType.remote != st )
// {
// // make sure the right mapping path for hosted and group when retrieve content
// path = PathUtils.storagePath( path, eventMetadata );
// }
logger.info("START: retrieval of content: {}/{}", sk, path);
Transfer item = contentController.get(sk, path, eventMetadata);
logger.info("HANDLE: retrieval of content: {}/{}", sk, path);
if (item == null) {
return handleMissingContentQuery(sk, path, builderModifier);
}
boolean handleLocking = false;
if (!item.isWriteLocked()) {
item.lockWrite();
handleLocking = true;
}
try {
if (!item.exists()) {
return handleMissingContentQuery(sk, path, builderModifier);
} else if (item.isDirectory() && StoreType.remote != st) {
logger.info("Getting listing at: {}", path + "/");
response = RequestUtils.redirectContentListing(packageType, type, name, path, request, builderModifier);
} else {
// For remote retrieval, once the content has been downloaded and cached at the mapped path,
// the item here would be a directory, so the path and item would be reassigned to the mapped ones.
// Note: since STORAGE_PATH is no longer used, the commented-out code below is obsolete.
// if ( item.isDirectory() && StoreType.remote == st )
// {
// path = PathUtils.storagePath( path, eventMetadata );
// origItem = item;
// item = contentController.get( sk, path, eventMetadata );
// }
// if ( item == null )
// {
// logger.error( "Retrieval of actual storage path: {} FAILED!", path );
// responseHelper.throwError( ApplicationStatus.SERVER_ERROR, new NullPointerException( path ), "Retrieval of mapped file from storage failed." );
// }
logger.info("RETURNING: retrieval of content: {}:{}", sk, path);
// open the stream here to prevent deletion while waiting for the transfer back to the user to start...
InputStream in = openInputStreamSafe(item, eventMetadata);
addFieldToActiveSpan(TRANSFER_SIZE, item.length());
final Response.ResponseBuilder builder = Response.ok(new TransferStreamingOutput(in, metricsManager, metricsConfig));
responseHelper.setInfoHeaders(builder, item, sk, path, false, getNPMContentType(path), contentController.getHttpMetadata(item));
response = responseWithBuilder(builder, builderModifier);
// // generate .http-metadata.json for npm group and remote retrieval to satisfy header requirements
// // for hosted repos, .http-metadata.json is generated at publish time
// // only package.json files generate this customized http metadata to satisfy the npm client's header check
// if ( eventMetadata.get( STORAGE_PATH ) != null && StoreType.hosted != st )
// {
// generateHttpMetadataHeaders( item, request, response );
// }
}
} finally {
if (handleLocking) {
item.unlock();
}
}
} catch (final IOException | IndyWorkflowException e) {
logger.error(String.format("Failed to download artifact: %s from: %s. Reason: %s", path, name, e.getMessage()), e);
response = responseHelper.formatResponse(e, builderModifier);
}
}
logger.info("RETURNING RESULT: {}:{}", sk, path);
return response;
}
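doGet funnels its early-exit 400/404 responses and the final 200 response through the responseWithBuilder helper, whose body is not shown in this excerpt. A minimal sketch of what such a helper could look like, reconstructed from its call sites above, is given here; treat it as an assumption rather than the actual Indy implementation.

import java.util.function.Consumer;
import javax.ws.rs.core.Response;

// Hypothetical reconstruction of the responseWithBuilder helper used in doGet above.
final class ResponseBuilderHelperSketch {

    static Response responseWithBuilder(final Response.ResponseBuilder builder,
                                        final Consumer<Response.ResponseBuilder> builderModifier) {
        // Let the caller customize the builder (e.g. add headers), then build the response.
        if (builderModifier != null) {
            builderModifier.accept(builder);
        }
        return builder.build();
    }
}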
Use of org.commonjava.indy.model.core.StoreType in project indy by Commonjava.
The class DataFileStoreUtils, method loadFromDiskAnd.
/**
 * Load store definitions from disk and apply the given consumer function to each one.
 * @param manager the DataFileManager used to locate definition files on disk
 * @param serializer the IndyObjectMapper used to deserialize the JSON definitions
 * @param key if null, load all definitions; otherwise load only the definition for this key
 * @param summary the change summary used when deleting unreadable definition files
 * @param consumer the function applied to each successfully loaded ArtifactStore
 */
public static void loadFromDiskAnd(DataFileManager manager, IndyObjectMapper serializer, StoreKey key, final ChangeSummary summary, Consumer<ArtifactStore> consumer) {
// Load a single file
if (key != null) {
DataFile f = manager.getDataFile(INDY_STORE, key.getPackageType(), key.getType().singularEndpointName(), key.getName() + ".json");
if (f.exists()) {
ArtifactStore store;
try {
String json = f.readString();
store = serializer.readValue(json, key.getType().getStoreClass());
} catch (IOException e) {
logger.error("Failed to read file", e);
return;
}
consumer.accept(store);
}
return;
}
// Load all
DataFile[] packageDirs = manager.getDataFile(INDY_STORE).listFiles((f) -> true);
for (DataFile pkgDir : packageDirs) {
for (StoreType type : StoreType.values()) {
DataFile[] files = pkgDir.getChild(type.singularEndpointName()).listFiles(f -> true);
if (files != null) {
for (final DataFile f : files) {
try {
final String json = f.readString();
final ArtifactStore store = serializer.readValue(json, type.getStoreClass());
if (store == null) {
f.delete(summary);
} else {
consumer.accept(store);
}
} catch (final IOException e) {
logger.error(String.format("Failed to load %s store: %s. Reason: %s", type, f, e.getMessage()), e);
try {
f.delete(summary);
} catch (IOException e1) {
logger.error("Failed to delete invalid store definition file: " + f, e);
}
}
}
}
}
}
}
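A common way to call this utility is with a null key, which walks every package directory and store type and hands each parsed definition to the consumer. The sketch below collects them into a map keyed by StoreKey; the field names dataFileManager and objectMapper, and the import path for ChangeSummary, are assumptions for illustration.

import java.util.HashMap;
import java.util.Map;

import org.commonjava.indy.audit.ChangeSummary; // assumed import location
import org.commonjava.indy.model.core.ArtifactStore;
import org.commonjava.indy.model.core.StoreKey;

// Usage sketch: a null key makes loadFromDiskAnd load every definition found on disk.
public Map<StoreKey, ArtifactStore> loadAllDefinitions() {
    final Map<StoreKey, ArtifactStore> stores = new HashMap<>();
    final ChangeSummary summary =
            new ChangeSummary(ChangeSummary.SYSTEM_USER, "Loading all store definitions.");
    DataFileStoreUtils.loadFromDiskAnd(dataFileManager, objectMapper, null, summary,
                                       store -> stores.put(store.getKey(), store));
    return stores;
}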
Use of org.commonjava.indy.model.core.StoreType in project indy by Commonjava.
The class DefaultArtifactStoreQuery, method getGroupOrdering.
private List<ArtifactStore> getGroupOrdering(final String packageType, final String groupName, final Boolean enabled, final boolean includeGroups, final boolean recurseGroups) throws IndyDataException {
if (packageType == null) {
throw new IndyDataException("packageType must be set on the query before calling this method!");
}
final Group master = (Group) dataManager.getArtifactStore(new StoreKey(packageType, group, groupName));
if (master == null) {
return emptyList();
}
final List<ArtifactStore> result = new ArrayList<>();
AtomicReference<IndyDataException> errorRef = new AtomicReference<>();
LinkedList<Group> toCheck = new LinkedList<>();
Set<StoreKey> seen = new HashSet<>();
toCheck.add(master);
while (!toCheck.isEmpty()) {
Group next = toCheck.removeFirst();
if (next == null || (next.isDisabled() && Boolean.TRUE.equals(this.enabled))) {
continue;
}
List<StoreKey> members = new ArrayList<>(next.getConstituents());
if (includeGroups) {
result.add(next);
}
members.forEach((key) -> {
if (!seen.contains(key)) {
seen.add(key);
final StoreType type = key.getType();
try {
if (recurseGroups && type == group) {
// if we're here, we're definitely recursing groups...
Group group = (Group) dataManager.getArtifactStore(key);
toCheck.addFirst(group);
} else {
final ArtifactStore store = dataManager.getArtifactStore(key);
if (store != null && !(store.isDisabled() && Boolean.TRUE.equals(this.enabled))) {
result.add(store);
}
}
} catch (IndyDataException e) {
errorRef.set(e);
}
}
});
IndyDataException error = errorRef.get();
if (error != null) {
throw error;
}
}
return result;
}
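The loop above is effectively a cycle-safe work-list traversal: nested groups are pushed to the front of the work list, and the seen set ensures each constituent is resolved at most once even when groups reference each other. The simplified, self-contained re-implementation below illustrates just that mechanism over an in-memory map; it drops the enabled filtering and the includeGroups/recurseGroups flags, and is an illustration rather than the actual DefaultArtifactStoreQuery code.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.commonjava.indy.model.core.ArtifactStore;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;

// Simplified sketch of the ordering traversal: a work list of groups plus a 'seen' set.
final class GroupOrderingSketch {

    static List<ArtifactStore> order(final Map<StoreKey, ArtifactStore> stores, final Group master) {
        final List<ArtifactStore> result = new ArrayList<>();
        final LinkedList<Group> toCheck = new LinkedList<>();
        final Set<StoreKey> seen = new HashSet<>();
        toCheck.add(master);
        while (!toCheck.isEmpty()) {
            final Group next = toCheck.removeFirst();
            result.add(next);
            for (final StoreKey key : next.getConstituents()) {
                if (!seen.add(key)) {
                    continue; // already visited: this is what keeps membership cycles finite
                }
                final ArtifactStore member = stores.get(key);
                if (key.getType() == StoreType.group && member instanceof Group) {
                    toCheck.addFirst((Group) member); // expand the nested group on the next iteration
                } else if (member != null) {
                    result.add(member);
                }
            }
        }
        return result;
    }
}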
Use of org.commonjava.indy.model.core.StoreType in project indy by Commonjava.
The class LegacyDataMigrationAction, method doMigrate.
private boolean doMigrate() throws IndyLifecycleException {
if (!(storeDataManager instanceof DataFileStoreDataManager)) {
logger.info("Store manager: {} is not based on DataFile's. Skipping migration.", storeDataManager.getClass().getName());
return false;
}
final DataFile basedir = dataFileManager.getDataFile(INDY_STORE);
final ChangeSummary summary = new ChangeSummary(ChangeSummary.SYSTEM_USER, "Migrating legacy store definitions.");
if (!basedir.exists()) {
return false;
}
StoreType[] storeTypes = StoreType.values();
final String[] dirs = basedir.list();
if (dirs == null || dirs.length < 1) {
return false;
}
Map<String, String> migrationCandidates = new HashMap<>();
// noinspection ConstantConditions
Stream.of(storeTypes).forEach(type -> {
File[] files = basedir.getDetachedFile().toPath().resolve(type.singularEndpointName()).toFile().listFiles((dir, fname) -> fname.endsWith(".json"));
if (files != null) {
Stream.of(files).forEach((f) -> {
String src = Paths.get(type.singularEndpointName(), f.getName()).toString();
String target = Paths.get(MAVEN_PKG_KEY, type.singularEndpointName(), f.getName()).toString();
migrationCandidates.put(src, target);
});
}
});
boolean changed = false;
for (Map.Entry<String, String> entry : migrationCandidates.entrySet()) {
DataFile src = dataFileManager.getDataFile(INDY_STORE, entry.getKey());
DataFile target = dataFileManager.getDataFile(INDY_STORE, entry.getValue());
if (target.exists()) {
continue;
}
DataFile targetDir = target.getParent();
if (!targetDir.exists() && !targetDir.mkdirs()) {
throw new IndyLifecycleException("Cannot make directory: %s.", targetDir.getPath());
} else if (!targetDir.isDirectory()) {
throw new IndyLifecycleException("Not a directory: %s.", targetDir.getPath());
}
try {
logger.info("Migrating definition {}", src.getPath());
final String json = src.readString();
final String migrated = objectMapper.patchLegacyStoreJson(json);
target.writeString(migrated, summary);
changed = true;
} catch (final IOException e) {
throw new IndyLifecycleException("Failed to migrate artifact-store definition from: %s to: %s. Reason: %s", e, src, target, e.getMessage(), e);
}
}
if (changed) {
try {
storeDataManager.reload();
} catch (IndyDataException e) {
throw new IndyLifecycleException("Failed to reload migrated store definitions: %s", e, e.getMessage());
}
}
return changed;
}
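The core of the migration is the path mapping built above: a legacy, type-first definition path is moved under a maven package-type directory. The tiny sketch below makes that mapping concrete; the file name central.json is invented, and MAVEN_PKG_KEY is assumed to resolve to the string "maven".

import java.nio.file.Paths;

// Worked example of the src -> target mapping built in doMigrate above.
public class LegacyPathMappingSketch {
    public static void main(final String[] args) {
        final String mavenPkgKey = "maven"; // assumed value of MAVEN_PKG_KEY
        final String src = Paths.get("remote", "central.json").toString();                 // <INDY_STORE>/remote/central.json
        final String target = Paths.get(mavenPkgKey, "remote", "central.json").toString(); // <INDY_STORE>/maven/remote/central.json
        System.out.println(src + "  ->  " + target);
    }
}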