Use of io.hops.hopsworks.common.featurestore.OptionDTO in project hopsworks by logicalclocks.
The class FeaturestoreADLSConnectorController, method getSparkOptionsGen2.
private List<OptionDTO> getSparkOptionsGen2(FeaturestoreADLSConnector adlsConnector, String serviceCredential) {
  List<OptionDTO> sparkOptions = new ArrayList<>();
  // Configure OAuth 2.0 client-credentials authentication for the ADLS Gen2 (abfs)
  // Hadoop connector, scoped to this connector's storage account.
  sparkOptions.add(new OptionDTO("fs.azure.account.auth.type." + adlsConnector.getAccountName() + ".dfs.core.windows.net", "OAuth"));
  sparkOptions.add(new OptionDTO("fs.azure.account.oauth.provider.type." + adlsConnector.getAccountName() + ".dfs.core.windows.net", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"));
  sparkOptions.add(new OptionDTO("fs.azure.account.oauth2.client.id." + adlsConnector.getAccountName() + ".dfs.core.windows.net", adlsConnector.getApplicationId()));
  sparkOptions.add(new OptionDTO("fs.azure.account.oauth2.client.secret." + adlsConnector.getAccountName() + ".dfs.core.windows.net", serviceCredential));
  // Token endpoint of the Azure AD tenant (directory) that issued the service principal.
  sparkOptions.add(new OptionDTO("fs.azure.account.oauth2.client.endpoint." + adlsConnector.getAccountName() + ".dfs.core.windows.net", "https://login.microsoftonline.com/" + adlsConnector.getDirectoryId() + "/oauth2/token"));
  return sparkOptions;
}
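A minimal sketch of how a client could consume these options, assuming the OptionDTO getters shown elsewhere on this page and a live SparkSession; the AdlsOptionsExample class and applyOptions helper are hypothetical, not Hopsworks code:

import io.hops.hopsworks.common.featurestore.OptionDTO;
import org.apache.spark.sql.SparkSession;
import java.util.List;

public class AdlsOptionsExample {
  // Copies each generated option into the Spark session's Hadoop configuration
  // so the abfss:// filesystem can authenticate against the storage account.
  public static void applyOptions(SparkSession spark, List<OptionDTO> sparkOptions) {
    for (OptionDTO option : sparkOptions) {
      spark.sparkContext().hadoopConfiguration().set(option.getName(), option.getValue());
    }
  }
}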
Use of io.hops.hopsworks.common.featurestore.OptionDTO in project hopsworks by logicalclocks.
The class TrainingDatasetService, method compute.
@POST
@Path("/{trainingDatasetId}/compute")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Setup a job to compute and write a training dataset", response = JobDTO.class)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.DATASET_VIEW, ApiScope.FEATURESTORE }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response compute(@Context UriInfo uriInfo, @Context SecurityContext sc,
    @PathParam("trainingDatasetId") Integer trainingDatasetId, TrainingDatasetJobConf trainingDatasetJobConf)
    throws FeaturestoreException, ServiceException, JobException, ProjectException, GenericException {
  verifyIdProvided(trainingDatasetId);
  Users user = jWTHelper.getUserPrincipal(sc);
  TrainingDataset trainingDataset = trainingDatasetController.getTrainingDatasetById(featurestore, trainingDatasetId);
  // Flatten the optional list of OptionDTOs into a name -> value map for the job setup.
  Map<String, String> writeOptions = null;
  if (trainingDatasetJobConf.getWriteOptions() != null) {
    writeOptions = trainingDatasetJobConf.getWriteOptions().stream()
        .collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
  }
  Jobs job = fsJobManagerController.setupTrainingDatasetJob(project, user, trainingDataset,
      trainingDatasetJobConf.getQuery(), trainingDatasetJobConf.getOverwrite(), writeOptions,
      trainingDatasetJobConf.getSparkJobConfiguration());
  JobDTO jobDTO = jobsBuilder.build(uriInfo, new ResourceRequest(ResourceRequest.Name.JOBS), job);
  // 201 Created, with the new job resource's location and representation.
  return Response.created(jobDTO.getHref()).entity(jobDTO).build();
}
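One subtlety in the stream conversion above: Collectors.toMap throws a NullPointerException if any option value is null, and the test further down this page shows that OptionDTO does allow null values. A standalone sketch demonstrating the behavior, with hypothetical option names, plus one null-tolerant alternative:

import io.hops.hopsworks.common.featurestore.OptionDTO;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapNullExample {
  public static void main(String[] args) {
    // Hypothetical option names; "someFlag" deliberately carries a null value.
    List<OptionDTO> options = Arrays.asList(
        new OptionDTO("spark.sql.shuffle.partitions", "200"),
        new OptionDTO("someFlag", null));
    try {
      // HashMap.merge, which toMap uses internally, rejects null values.
      options.stream().collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
    } catch (NullPointerException e) {
      System.out.println("toMap failed on a null option value");
    }
    // A null-tolerant alternative: populate a HashMap directly.
    Map<String, String> tolerant = new HashMap<>();
    options.forEach(o -> tolerant.put(o.getName(), o.getValue()));
    System.out.println(tolerant.size()); // 2, including the null-valued entry
  }
}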
Use of io.hops.hopsworks.common.featurestore.OptionDTO in project hopsworks by logicalclocks.
The class FeaturegroupService, method ingestionJob.
@POST
@Path("/{featuregroupId}/ingestion")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_SCIENTIST, AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.FEATURESTORE }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiOperation(value = "Prepares environment for uploading data to ingest into the feature group", response = IngestionJobDTO.class)
public Response ingestionJob(@Context SecurityContext sc, @Context UriInfo uriInfo,
    @ApiParam(value = "Id of the featuregroup", required = true) @PathParam("featuregroupId") Integer featuregroupId,
    IngestionJobConf ingestionJobConf)
    throws DatasetException, HopsSecurityException, FeaturestoreException, JobException {
  Users user = jWTHelper.getUserPrincipal(sc);
  verifyIdProvided(featuregroupId);
  Featuregroup featuregroup = featuregroupController.getFeaturegroupById(featurestore, featuregroupId);
  // Options for reading the uploaded data, if provided.
  Map<String, String> dataOptions = null;
  if (ingestionJobConf.getDataOptions() != null) {
    dataOptions = ingestionJobConf.getDataOptions().stream()
        .collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
  }
  // Options for writing into the feature group, if provided.
  Map<String, String> writeOptions = null;
  if (ingestionJobConf.getWriteOptions() != null) {
    writeOptions = ingestionJobConf.getWriteOptions().stream()
        .collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
  }
  IngestionJob ingestionJob = fsJobManagerController.setupIngestionJob(project, user, featuregroup,
      ingestionJobConf.getSparkJobConfiguration(), ingestionJobConf.getDataFormat(), writeOptions, dataOptions);
  IngestionJobDTO ingestionJobDTO = ingestionJobBuilder.build(uriInfo, project, featuregroup, ingestionJob);
  return Response.ok().entity(ingestionJobDTO).build();
}
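Both compute and ingestionJob repeat the same null-guarded list-to-map conversion. A hypothetical helper, not present in the Hopsworks source, that would factor it out:

import io.hops.hopsworks.common.featurestore.OptionDTO;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public final class OptionDTOUtil {
  private OptionDTOUtil() {}

  // Mirrors the inline pattern of both endpoints: a null list stays null,
  // otherwise collect name -> value pairs.
  public static Map<String, String> toMap(List<OptionDTO> options) {
    if (options == null) {
      return null;
    }
    return options.stream().collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
  }
}

With such a helper, each endpoint body would reduce to a single call such as OptionDTOUtil.toMap(ingestionJobConf.getWriteOptions()).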
Use of io.hops.hopsworks.common.featurestore.OptionDTO in project hopsworks by logicalclocks.
The class TestStorageConnectorUtil, method testFromOptions.
@Test
public void testFromOptions() throws Exception {
  // Covers the four value shapes: a normal value, a null value, the literal string "null", and an empty string.
  List<OptionDTO> optionList = Arrays.asList(new OptionDTO("option1", "value1"), new OptionDTO("option2", null),
      new OptionDTO("option3", "null"), new OptionDTO("option4", ""));
  String result = storageConnectorUtil.fromOptions(optionList);
  // A null value is dropped from the JSON entirely, while "null" and "" are kept verbatim.
  Assert.assertEquals("[{\"name\":\"option1\",\"value\":\"value1\"},{\"name\":\"option2\"},"
      + "{\"name\":\"option3\",\"value\":\"null\"},{\"name\":\"option4\",\"value\":\"\"}]", result);
}
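The expected JSON shows that a null option value is omitted from the output while the string "null" and the empty string are preserved. A plausible sketch of such behavior, assuming Jackson; this is not the actual StorageConnectorUtil implementation:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.hops.hopsworks.common.featurestore.OptionDTO;
import java.util.List;

public class FromOptionsSketch {
  private final ObjectMapper objectMapper = new ObjectMapper()
      // Omit null fields, so new OptionDTO("option2", null) becomes {"name":"option2"}.
      .setSerializationInclusion(JsonInclude.Include.NON_NULL);

  public String fromOptions(List<OptionDTO> options) throws JsonProcessingException {
    return objectMapper.writeValueAsString(options);
  }
}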
Use of io.hops.hopsworks.common.featurestore.OptionDTO in project hopsworks by logicalclocks.
The class OnlineFeaturestoreController, method createJdbcConnectorForOnlineFeaturestore.
/**
 * Utility function for creating a JDBC connector to the online feature store for a particular user.
 *
 * @param onlineDbUsername the db-username of the connection
 * @param featurestore the featurestore metadata
 * @param dbName name of the MySQL database
 * @throws FeaturestoreException if a connector with the same name already exists
 */
public void createJdbcConnectorForOnlineFeaturestore(String onlineDbUsername, Featurestore featurestore,
    String dbName) throws FeaturestoreException {
  String connectorName = onlineDbUsername + FeaturestoreConstants.ONLINE_FEATURE_STORE_CONNECTOR_SUFFIX;
  if (featurestoreConnectorFacade.findByFeaturestoreName(featurestore, connectorName).isPresent()) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ILLEGAL_STORAGE_CONNECTOR_NAME, Level.FINE,
        "a storage connector with that name already exists");
  }
  FeaturestoreConnector featurestoreConnector = new FeaturestoreConnector();
  featurestoreConnector.setName(connectorName);
  featurestoreConnector.setDescription(
      "JDBC connection to Hopsworks Project Online Feature Store NDB Database for user: " + onlineDbUsername);
  featurestoreConnector.setFeaturestore(featurestore);
  featurestoreConnector.setConnectorType(FeaturestoreConnectorType.JDBC);
  FeaturestoreJdbcConnector featurestoreJdbcConnector = new FeaturestoreJdbcConnector();
  // Disable SSL and allow public key retrieval for the in-cluster NDB connection.
  featurestoreJdbcConnector.setConnectionString(settings.getFeaturestoreJdbcUrl() + dbName
      + "?useSSL=false&allowPublicKeyRetrieval=true");
  // JDBC arguments: credentials (the password is stored as a template placeholder rather
  // than the real secret), driver class, and Spark JDBC tuning options.
  List<OptionDTO> arguments = new ArrayList<>();
  arguments.add(new OptionDTO(FeaturestoreConstants.ONLINE_FEATURE_STORE_JDBC_PASSWORD_ARG,
      FeaturestoreConstants.ONLINE_FEATURE_STORE_CONNECTOR_PASSWORD_TEMPLATE));
  arguments.add(new OptionDTO(FeaturestoreConstants.ONLINE_FEATURE_STORE_JDBC_USER_ARG, onlineDbUsername));
  arguments.add(new OptionDTO(FeaturestoreConstants.ONLINE_FEATURE_STORE_JDBC_DRIVER_ARG, MYSQL_DRIVER));
  arguments.add(new OptionDTO("isolationLevel", "NONE"));
  arguments.add(new OptionDTO("batchsize", "500"));
  // Serialize the arguments to JSON before persisting them on the connector entity.
  featurestoreJdbcConnector.setArguments(storageConnectorUtil.fromOptions(arguments));
  featurestoreConnector.setJdbcConnector(featurestoreJdbcConnector);
  featurestoreConnectorFacade.update(featurestoreConnector);
}
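For context, a hypothetical Spark job writing features through such a connector; the URL, driver class, credentials, and table name below are placeholders, not values from Hopsworks. The isolationLevel and batchsize arguments map directly onto Spark's JDBC writer options:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

public class OnlineFsWriteExample {
  public static void writeFeatures(Dataset<Row> features) {
    features.write()
        .format("jdbc")
        // Placeholder URL in the same shape as the connection string built above.
        .option("url", "jdbc:mysql://onlinefs.example:3306/project_featurestore?useSSL=false&allowPublicKeyRetrieval=true")
        .option("driver", "com.mysql.cj.jdbc.Driver") // placeholder for MYSQL_DRIVER
        .option("user", "project_user") // placeholder for the online db username
        .option("password", "...") // resolved from the stored password template at runtime
        .option("dbtable", "feature_group_1") // placeholder table name
        .option("isolationLevel", "NONE") // skip transactions for faster bulk inserts
        .option("batchsize", "500") // rows per JDBC batch insert
        .mode(SaveMode.Append)
        .save();
  }
}

Storing isolationLevel and batchsize as connector arguments lets every Spark writer that uses the connector pick up the same bulk-insert tuning without repeating it per job.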