Use of io.hops.hopsworks.exceptions.ProvenanceException in project hopsworks by logicalclocks: the class ProvStateController, method provFileState.
/**
 * Executes a provenance file-state search against elastic and converts the hits
 * through the supplied handler factory.
 *
 * @param <R>  parsed elastic item
 * @param <S1> intermediate result wrapped in Try
 * @param <S2> final result
 * @return the final result produced by {@code handlerFactory}
 * @throws ProvenanceException if the elastic query fails or the result cannot be parsed
 */
private <R, S1, S2> S2 provFileState(Long projectIId, Map<ProvParser.Field, ProvParser.FilterVal> fileStateFilters,
    List<Pair<ProvParser.Field, SortOrder>> fileStateSortBy, Map<String, String> xAttrsFilters,
    Map<String, String> likeXAttrsFilters, Set<String> hasXAttrsFilters,
    List<ProvStateParamBuilder.SortE> xattrSortBy, Integer offset, Integer limit,
    HandlerFactory<R, S1, S2> handlerFactory) throws ProvenanceException {
  // Compose the request in stages: base request, state/xattr filters, ordering, pagination.
  CheckedSupplier<SearchRequest, ProvenanceException> requestSupplier =
      ElasticHelper.baseSearchRequest(settings.getProvFileIndex(projectIId), settings.getElasticDefaultScrollPageSize())
          .andThen(filterByStateParams(fileStateFilters, xAttrsFilters, likeXAttrsFilters, hasXAttrsFilters))
          .andThen(ElasticHelper.withFileStateOrder(fileStateSortBy, xattrSortBy))
          .andThen(ElasticHelper.withPagination(offset, limit, settings.getElasticMaxScrollPageSize()));
  SearchRequest searchRequest = requestSupplier.get();
  Pair<Long, Try<S1>> rawResult;
  try {
    rawResult = client.search(searchRequest, handlerFactory.getHandler());
  } catch (ElasticException e) {
    String msg = "provenance - elastic query problem";
    throw ProvHelper.fromElastic(e, msg, msg + " - file state");
  }
  // checkedResult unwraps the Try, surfacing any parsing failure as a ProvenanceException.
  return handlerFactory.checkedResult(rawResult);
}
Use of io.hops.hopsworks.exceptions.ProvenanceException in project hopsworks by logicalclocks: the class ModelsBuilder, method build.
// Build collection
/**
 * Builds the collection DTO of models in a model registry by listing the
 * registry's model artifacts through file provenance state.
 *
 * @param uriInfo request URI context used to build item links
 * @param resourceRequest expansion/pagination parameters
 * @param user the user making the request
 * @param userProject the project the user is operating from
 * @param modelRegistryProject the project owning the model registry
 * @return the populated collection DTO; empty (count 0) when not expanded or when
 *         the registry has no provenance endpoint or no elastic mapping yet
 * @throws ModelRegistryException if the provenance query or dataset resolution fails
 */
public ModelDTO build(UriInfo uriInfo, ResourceRequest resourceRequest, Users user, Project userProject, Project modelRegistryProject) throws ModelRegistryException, GenericException, SchematizedTagException, MetadataException {
  ModelDTO dto = new ModelDTO();
  uri(dto, uriInfo, userProject, modelRegistryProject);
  expand(dto, resourceRequest);
  // Uppercase long literal (was 0l, easily misread as 01).
  dto.setCount(0L);
  if (dto.isExpand()) {
    validatePagination(resourceRequest);
    ProvStateDTO fileState;
    try {
      Pair<ProvStateParamBuilder, ModelRegistryDTO> provFilesParamBuilder = buildModelProvenanceParams(userProject, modelRegistryProject, resourceRequest);
      if (provFilesParamBuilder.getValue1() == null) {
        // no endpoint - no results
        return dto;
      }
      fileState = provenanceController.provFileStateList(provFilesParamBuilder.getValue1().getParentProject(), provFilesParamBuilder.getValue0());
      List<ProvStateDTO> models = new LinkedList<>(fileState.getItems());
      dto.setCount(fileState.getCount());
      String modelsDatasetPath = modelUtils.getModelsDatasetPath(userProject, modelRegistryProject);
      // Build an item DTO per provenance hit; skip hits that do not resolve to a model.
      for (ProvStateDTO fileProvStateHit : models) {
        ModelDTO modelDTO = build(uriInfo, resourceRequest, user, userProject, modelRegistryProject, fileProvStateHit, modelsDatasetPath);
        if (modelDTO != null) {
          dto.addItem(modelDTO);
        }
      }
    } catch (ProvenanceException e) {
      if (ProvHelper.missingMappingForField(e)) {
        // Index has no mapping yet (no models ever written) - return the empty collection.
        // Message fixed: this is the models query, not the experiments query.
        LOGGER.log(Level.WARNING, "Could not find elastic mapping for models query", e);
        return dto;
      } else {
        throw new ModelRegistryException(RESTCodes.ModelRegistryErrorCode.MODEL_LIST_FAILED, Level.FINE, "Unable to list models for project " + modelRegistryProject.getName(), e.getMessage(), e);
      }
    } catch (DatasetException e) {
      throw new ModelRegistryException(RESTCodes.ModelRegistryErrorCode.MODEL_LIST_FAILED, Level.FINE, "Unable to list models for project " + modelRegistryProject.getName(), e.getMessage(), e);
    }
  }
  return dto;
}
Use of io.hops.hopsworks.exceptions.ProvenanceException in project hopsworks by logicalclocks: the class HiveController, method createDatasetDb.
/**
 * Creates a Hopsworks dataset of a Hive database
 *
 * @param project the project of the hive database and the place where the dataset will reside
 * @param user the user making the request
 * @param dfso dfso
 * @param dbName name of the hive database
 * @param datasetType the type of database (regular HiveDB or a FeaturestoreDB)
 * @param featurestore the featurestore with extended metadata of the dataset in case of type featurestoreDB,
 * defaults to null.
 * @param metaStatus provenance core metadata status to apply to the dataset directory
 * @throws IOException if permissions, provenance setup or quota assignment fail; the
 *         database directory is removed before rethrowing
 */
@TransactionAttribute(TransactionAttributeType.NEVER)
public void createDatasetDb(Project project, Users user, DistributedFileSystemOps dfso, String dbName, DatasetType datasetType, Featurestore featurestore, ProvTypeDTO metaStatus) throws IOException {
  if (datasetType != DatasetType.HIVEDB && datasetType != DatasetType.FEATURESTORE) {
    throw new IllegalArgumentException("Invalid dataset type for hive database");
  }
  // Hive database names are case insensitive and lower case
  Path dbPath = getDbPath(dbName);
  Inode dbInode = inodeController.getInodeAtPath(dbPath.toString());
  // Persist Hive db as dataset in the Hopsworks database
  // Make the dataset editable by default
  Dataset dbDataset = new Dataset(dbInode, project, DatasetAccessPermission.EDITABLE);
  dbDataset.setDsType(datasetType);
  dbDataset.setSearchable(true);
  dbDataset.setFeatureStore(featurestore);
  datasetFacade.persistDataset(dbDataset);
  try {
    // Assign database directory to the user and project group
    hdfsUsersBean.createDatasetGroupsAndSetPermissions(user, project, dbDataset, dbPath, dfso);
    fsProvenanceCtrl.updateHiveDatasetProvCore(project, dbPath.toString(), metaStatus, dfso);
    datasetController.logDataset(project, dbDataset, OperationType.Add);
    activityFacade.persistActivity(ActivityFacade.NEW_DATA + dbDataset.getName(), project, user, ActivityFlag.DATASET);
    // Set the default quota
    switch(datasetType) {
      case HIVEDB:
        if (settings.getHiveDbDefaultQuota() > -1) {
          dfso.setHdfsSpaceQuotaInMBs(dbPath, settings.getHiveDbDefaultQuota());
        }
        break;
      case FEATURESTORE:
        if (settings.getFeaturestoreDbDefaultQuota() > -1) {
          dfso.setHdfsSpaceQuotaInMBs(dbPath, settings.getFeaturestoreDbDefaultQuota());
        }
        break;
    }
    projectFacade.setTimestampQuotaUpdate(project, new Date());
  } catch (IOException | ProvenanceException e) {
    // Pass the Throwable to the logger (instead of concatenating it into the message)
    // so the full stack trace is preserved in the log.
    logger.log(Level.SEVERE, "Cannot assign Hive database directory " + dbPath.toString() + " to correct user/group.", e);
    // Remove the database directory and cleanup the metadata
    try {
      dfso.rm(dbPath, true);
    } catch (IOException rmEx) {
      // Nothing we can really do here
      logger.log(Level.SEVERE, "Cannot delete Hive database directory: " + dbPath.toString(), rmEx);
    }
    throw new IOException(e);
  }
}
Use of io.hops.hopsworks.exceptions.ProvenanceException in project hopsworks by logicalclocks: the class ProvOpsParamBuilder, method aggregations.
/**
 * Adds the requested aggregations to this builder, validating each name against
 * the {@link ProvOpsAggregations} enum.
 *
 * @param aggregations aggregation names to enable; {@code null} is a no-op
 * @return this builder, for chaining
 * @throws ProvenanceException if a name does not match a supported aggregation
 */
public ProvOpsParamBuilder aggregations(Set<String> aggregations) throws ProvenanceException {
  if (aggregations == null) {
    return this;
  }
  for (String agg : aggregations) {
    try {
      ProvOpsAggregations aggregation = ProvOpsAggregations.valueOf(agg);
      withAggregation(aggregation);
    } catch (NullPointerException | IllegalArgumentException e) {
      // Fixed missing space after "aggregation" so the message reads correctly.
      String msg = "aggregation " + agg + " not supported - supported:" + EnumSet.allOf(ProvOpsAggregations.class);
      throw new ProvenanceException(RESTCodes.ProvenanceErrorCode.BAD_REQUEST, Level.INFO, msg, "exception extracting aggregations");
    }
  }
  return this;
}
Use of io.hops.hopsworks.exceptions.ProvenanceException in project hopsworks by logicalclocks: the class ProvStateController, method provFileStateList.
/**
 * Lists the provenance file states of a project, optionally expanding each hit
 * with the state of the application that produced it.
 *
 * @param project the project whose provenance index is queried
 * @param params query filters, sorting, pagination and app-state expansion settings
 * @return the matching file states, with app state attached when expansion is requested
 * @throws ProvenanceException if pagination is combined with app-state filtering,
 *         or if the underlying elastic query fails
 */
public ProvStateDTO provFileStateList(Project project, ProvStateParamBuilder params) throws ProvenanceException {
  // Guard: app-state filtering happens after the elastic query, so elastic-side
  // pagination would produce incomplete pages.
  boolean paginated = params.base.pagination != null;
  if (paginated && !params.extensions.appStateFilter.isEmpty()) {
    throw new ProvenanceException(RESTCodes.ProvenanceErrorCode.UNSUPPORTED, Level.INFO,
        "cannot use pagination with app state filtering");
  }
  ProvStateDTO fileStates = provFileState(project, params.base, new HandlerFactory.BaseList(), Provenance.getProjectIndex(project));
  if (params.extensions.hasAppExpansion()) {
    // Register every app id found in the hits so the app-state query covers them all.
    for (ProvStateDTO hit : fileStates.getItems()) {
      getAppId(hit).ifPresent(params::withAppExpansion);
    }
    Map<String, Map<Provenance.AppState, ProvAppStateElastic>> appStatesById = appCtrl.provAppState(params.extensions.appStateFilter);
    // Attach the resolved app state to each hit; fall back to "unknown" when absent.
    for (ProvStateDTO hit : fileStates.getItems()) {
      Optional<String> appId = getAppId(hit);
      if (appId.isPresent() && appStatesById.containsKey(appId.get())) {
        hit.setAppState(ProvAppHelper.buildAppState(appStatesById.get(appId.get())));
      } else {
        hit.setAppState(ProvAppStateDTO.unknown());
      }
    }
  }
  return fileStates;
}
Aggregations