Use of com.thinkbiganalytics.nifi.rest.model.NifiProperty in project kylo by Teradata — from the class NifiPropertyUtil, method matchAndSetPropertyValues.
/**
 * Updates the values of the destination properties that match the specified source properties.
 *
 * <p>Matches are made using the processor ID or name and the property key.</p>
 *
 * @param sourceGroupName name of the source process group
 * @param destinationGroupName name of the destination process group
 * @param destinationProperties properties of processors in the destination group
 * @param sourceProperties properties of processors in the source group
 * @return modified properties from the destination group
 */
@Nonnull
public static List<NifiProperty> matchAndSetPropertyValues(@Nonnull final String sourceGroupName, @Nonnull final String destinationGroupName, @Nonnull final List<NifiProperty> destinationProperties, @Nullable final List<NifiProperty> sourceProperties) {
    // Nothing to match when either side has no properties
    if (destinationProperties.isEmpty() || sourceProperties == null || sourceProperties.isEmpty()) {
        return Collections.emptyList();
    }

    // Index the destination properties for constant-time matching
    final int size = destinationProperties.size();
    final Map<String, NifiProperty> destinationById = new HashMap<>(size);
    final Map<String, NifiProperty> destinationByName = new HashMap<>(size);
    final Map<String, String> groupIdsByGroupName = new HashMap<>(size);
    final Map<String, String> processorIdsByProcessorName = new HashMap<>(size);
    for (final NifiProperty destination : destinationProperties) {
        destinationById.put(destination.getIdKey(), destination);
        destinationByName.put(destination.getNameKey(), destination);
        groupIdsByGroupName.put(destination.getProcessGroupName(), destination.getProcessGroupId());
        processorIdsByProcessorName.put(destination.getProcessorName(), destination.getProcessorId());
    }

    // Walk the source properties and update any matching destination property
    final List<NifiProperty> modified = new ArrayList<>(size);
    for (final NifiProperty source : sourceProperties) {
        // Snapshot the original source state, then point its group name at the destination group
        source.setTemplateProperty(new NifiProperty(source));
        if (source.getProcessGroupName().equalsIgnoreCase(sourceGroupName)) {
            source.setProcessGroupName(destinationGroupName);
        }

        // Prefer an ID match; fall back to a name match
        NifiProperty match = destinationById.get(source.getIdKey());
        if (match == null) {
            match = destinationByName.get(source.getNameKey());
        }

        if (match != null) {
            // Rewire the source property's group/processor IDs to the destination's
            source.setProcessGroupId(groupIdsByGroupName.get(match.getProcessGroupName()));
            source.setProcessorId(processorIdsByProcessorName.get(match.getProcessorName()));
            final String value = source.getValue();
            if (isValidPropertyValue(match, value)) {
                match.setValue(value);
                modified.add(match);
            }
        }
    }
    return modified;
}
Use of com.thinkbiganalytics.nifi.rest.model.NifiProperty in project kylo by Teradata — from the class NifiPropertyUtil, method getPropertiesForProcessor.
/**
 * Returns the list of properties on a given processor.
 *
 * <p>Only properties that have a matching property descriptor are included.</p>
 *
 * @param processGroup the processor's group
 * @param processor the processor
 * @param propertyDescriptorTransform the transform utility
 * @return the list of properties on the processor
 */
public static List<NifiProperty> getPropertiesForProcessor(ProcessGroupDTO processGroup, ProcessorDTO processor, NiFiPropertyDescriptorTransform propertyDescriptorTransform) {
    List<NifiProperty> properties = new ArrayList<>();
    // Hoist the config lookup; it is invariant across the loop
    final ProcessorConfigDTO config = processor.getConfig();
    for (Map.Entry<String, String> entry : config.getProperties().entrySet()) {
        PropertyDescriptorDTO descriptorDTO = config.getDescriptors().get(entry.getKey());
        // Skip entries without a descriptor; they cannot be transformed
        if (descriptorDTO != null) {
            // Reuse the descriptor already fetched instead of a second map lookup
            final NiFiPropertyDescriptor propertyDescriptor = propertyDescriptorTransform.toNiFiPropertyDescriptor(descriptorDTO);
            final NifiProperty property = new NifiProperty(processor.getParentGroupId(), processor.getId(), entry.getKey(), entry.getValue(), propertyDescriptor);
            property.setProcessGroupName(processGroup.getName());
            property.setProcessorName(processor.getName());
            property.setProcessorType(processor.getType());
            properties.add(property);
        }
    }
    return properties;
}
Use of com.thinkbiganalytics.nifi.rest.model.NifiProperty in project kylo by Teradata — from the class FeedImporter, method validateSensitiveProperties.
/**
 * Validates that all required sensitive property values for the feed being imported have been supplied.
 *
 * <p>Updates the upload progress with the validation outcome and marks the feed invalid on failure.</p>
 *
 * @return {@code true} if the sensitive properties were applied successfully, {@code false} otherwise
 */
private boolean validateSensitiveProperties() {
    final FeedMetadata feedMetadata = importFeed.getFeedToImport();
    // Detect any sensitive properties and prompt for input before proceeding
    final UploadProgressMessage progress = uploadProgressService.addUploadStatus(importFeed.getImportOptions().getUploadKey(), "Validating feed properties.");
    ImportUtil.addToImportOptionsSensitiveProperties(importFeedOptions, feedMetadata.getSensitiveProperties(), ImportComponent.FEED_DATA);
    final boolean valid = ImportUtil.applyImportPropertiesToFeed(feedMetadata, importFeed, ImportComponent.FEED_DATA);
    if (valid) {
        progress.update("Validated feed properties.", true);
    } else {
        progress.update("Validation Error. Additional properties are needed before uploading the feed.", false);
        importFeed.setValid(false);
    }
    uploadProgressService.completeSection(importFeed.getImportOptions(), ImportSection.Section.VALIDATE_PROPERTIES);
    return valid;
}
Use of com.thinkbiganalytics.nifi.rest.model.NifiProperty in project kylo by Teradata — from the class FeedRestController, method uploadFile.
@POST
@Path("/{feedId}/upload-file")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Uploads files to be ingested by a feed.")
@ApiResponses({ @ApiResponse(code = 200, message = "Files are ready to be ingested."), @ApiResponse(code = 500, message = "Files could not be saved.", response = RestResponseStatus.class) })
public Response uploadFile(@PathParam("feedId") String feedId, FormDataMultiPart multiPart) {
    // Resolve the feed's dropzone configuration from its NiFi properties
    List<NifiProperty> properties = getNifiProperties(feedId);
    FileUploadContext context = getFileUploadContext(properties);
    if (!context.isValid()) {
        throw new InternalServerErrorException("Unable to upload file with empty dropzone or file");
    }
    List<BodyPart> bodyParts = multiPart.getBodyParts();
    // Track what has already been written so error messages can report partial success
    List<String> uploadedFiles = new ArrayList<>();
    for (BodyPart bodyPart : bodyParts) {
        BodyPartEntity entity = (BodyPartEntity) bodyPart.getEntity();
        String fileName = bodyPart.getContentDisposition().getFileName();
        try {
            saveFile(entity.getInputStream(), context);
            uploadedFiles.add(fileName);
        } catch (AccessDeniedException e) {
            String errTemplate = getErrorTemplate(uploadedFiles, "Permission denied attempting to write file [%s] to [%s]. Check with system administrator to ensure this application has write permissions to folder");
            String err = String.format(errTemplate, fileName, context.getDropzone());
            // Log with the throwable so the stack trace is preserved, and keep the cause on the rethrow
            log.error(err, e);
            throw new InternalServerErrorException(err, e);
        } catch (Exception e) {
            String errTemplate = getErrorTemplate(uploadedFiles, "Unexpected exception writing file [%s] to [%s].");
            String err = String.format(errTemplate, fileName, context.getDropzone());
            log.error(err, e);
            throw new InternalServerErrorException(err, e);
        }
    }
    return Response.ok("").build();
}
Use of com.thinkbiganalytics.nifi.rest.model.NifiProperty in project kylo by Teradata — from the class FeedRestController, method profileSummary.
@GET
@Path("/{feedId}/profile-summary")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Gets a summary of the feed profiles.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the profile summaries.", response = Map.class, responseContainer = "List"), @ApiResponse(code = 500, message = "The profiles are unavailable.", response = RestResponseStatus.class) })
public Response profileSummary(@PathParam("feedId") String feedId) {
    FeedMetadata feedMetadata = getMetadataService().getFeedById(feedId);
    final String profileTable = HiveUtils.quoteIdentifier(feedMetadata.getProfileTableName());
    String query = "SELECT * from " + profileTable + " where columnname = '(ALL)'";
    List<Map<String, Object>> rows = new ArrayList<>();
    try {
        QueryResult results = hiveService.query(query);
        rows.addAll(results.getRows());
        // Add in the archive date-time fields if applicable
        final String ARCHIVE_PROCESSOR_TYPE = "com.thinkbiganalytics.nifi.GetTableData";
        if (feedMetadata.getInputProcessorType().equalsIgnoreCase(ARCHIVE_PROCESSOR_TYPE)) {
            NifiProperty property = NifiPropertyUtil.findPropertyByProcessorType(feedMetadata.getProperties(), ARCHIVE_PROCESSOR_TYPE, "Date Field");
            if (property != null && property.getValue() != null) {
                String field = property.getValue();
                // Strip any table qualifier, e.g. "table.column" -> "column"
                if (field.contains(".")) {
                    field = StringUtils.substringAfterLast(field, ".");
                }
                query = "SELECT * from " + profileTable + " where metrictype IN('MIN_TIMESTAMP','MAX_TIMESTAMP') AND columnname = " + HiveUtils.quoteString(field);
                QueryResult dateRows = hiveService.query(query);
                if (dateRows != null && !dateRows.isEmpty()) {
                    rows.addAll(dateRows.getRows());
                }
            }
        }
    } catch (DataAccessException e) {
        // Guard against a null cause or message before classifying the failure (was a latent NPE)
        final Throwable cause = e.getCause();
        final String causeMessage = (cause != null && cause.getMessage() != null) ? cause.getMessage() : "";
        if (cause instanceof org.apache.hive.service.cli.HiveSQLException && causeMessage.contains("Table not found")) {
            // This exception is ok to swallow since it just means no profile data exists yet
        } else if (causeMessage.contains("HiveAccessControlException Permission denied")) {
            throw new AccessControlException("You do not have permission to execute this hive query");
        } else {
            throw e;
        }
    }
    return Response.ok(rows).build();
}
Aggregations