Example use of org.apache.commons.lang3.StringUtils.isEmpty in the project adeptj-runtime by AdeptJ: class Server, method populateCredentialsStore.
/**
 * Seeds the admin credentials store with the default passwords declared in the
 * Undertow configuration. A default is only written for a user whose password
 * has not already been set (for example via the web console).
 *
 * @param undertowConf configuration holding the user-to-credential mapping
 */
private void populateCredentialsStore(Config undertowConf) {
    try (MVStore store = MVStore.open(MV_CREDENTIALS_STORE)) {
        MVMap<String, String> credentials = store.openMap(H2_MAP_ADMIN_CREDENTIALS);
        undertowConf.getObject(KEY_USER_CREDENTIAL_MAPPING).entrySet().forEach(entry -> {
            String user = entry.getKey();
            // Skip users that already have a stored (console-managed) password.
            if (StringUtils.isEmpty(credentials.get(user))) {
                String configuredValue = (String) entry.getValue().unwrapped();
                credentials.put(user, configuredValue.substring(PWD_START_INDEX));
            }
        });
    }
}
Example use of org.apache.commons.lang3.StringUtils.isEmpty in the project gravitee-management-rest-api by gravitee-io: class PageServiceImpl, method createPage.
/**
 * Creates a new page, attached either to an API (when {@code apiId} is set) or to the
 * environment. Applies type-specific consistency checks (translation, folder, link,
 * swagger/markdown), persists the page, optionally creates a page revision, writes an
 * audit log entry and indexes the resulting page in the search engine.
 *
 * @param apiId         id of the owning API, or empty/null for an environment page
 * @param newPageEntity the page to create
 * @param environmentId id of the environment, used when {@code apiId} is empty
 * @param pageId        optional caller-supplied page id; must be a valid UUID when present
 * @return the created page
 * @throws TechnicalManagementException when persistence or content fetching fails
 */
private PageEntity createPage(String apiId, NewPageEntity newPageEntity, String environmentId, String pageId) {
    try {
        logger.debug("Create page {} for API {}", newPageEntity, apiId);
        // Reuse the caller-supplied id when present. UUID.fromString() validates its
        // format and throws IllegalArgumentException on malformed input; the original
        // "UUID.fromString(pageId) != null" comparison was always true (fromString
        // either returns a non-null UUID or throws), so the validation is made explicit.
        final String id;
        if (pageId == null) {
            id = UuidString.generateRandom();
        } else {
            UUID.fromString(pageId);
            id = pageId;
        }
        PageType newPageType = newPageEntity.getType();
        // Handle default visibility.
        if (newPageEntity.getVisibility() == null) {
            newPageEntity.setVisibility(Visibility.PUBLIC);
        }
        // A page revision is only created for SWAGGER, Markdown and Translation pages.
        boolean createRevision = false;
        // Useful when importing an api with pages in the old format: excluded groups
        // are converted into private access controls.
        if (newPageEntity.getExcludedGroups() != null && !newPageEntity.getExcludedGroups().isEmpty()) {
            newPageEntity.setVisibility(Visibility.PRIVATE);
            newPageEntity.setExcludedAccessControls(true);
            newPageEntity.setAccessControls(newPageEntity.getExcludedGroups().stream().map((groupId -> {
                AccessControlEntity accessControl = new AccessControlEntity();
                accessControl.setReferenceType("GROUP");
                accessControl.setReferenceId(groupId);
                return accessControl;
            })).collect(Collectors.toSet()));
        }
        if (PageType.TRANSLATION.equals(newPageType)) {
            checkTranslationConsistency(newPageEntity.getParentId(), newPageEntity.getConfiguration(), true);
            Optional<Page> optTranslatedPage = this.pageRepository.findById(newPageEntity.getParentId());
            if (optTranslatedPage.isPresent()) {
                newPageEntity.setPublished(optTranslatedPage.get().isPublished());
                // Create a revision only when the translated page is a Swagger or Markdown page.
                createRevision = isSwaggerOrMarkdown(optTranslatedPage.get().getType());
            }
        }
        if (PageType.FOLDER.equals(newPageType)) {
            checkFolderConsistency(newPageEntity);
        }
        if (PageType.LINK.equals(newPageType)) {
            String resourceType = newPageEntity.getConfiguration().get(PageConfigurationKeys.LINK_RESOURCE_TYPE);
            String content = newPageEntity.getContent();
            if (content == null || content.isEmpty()) {
                throw new PageActionException(PageType.LINK, "be created. It must have a URL, a page Id or a category Id");
            }
            if ("root".equals(content) || PageConfigurationKeys.LINK_RESOURCE_TYPE_EXTERNAL.equals(resourceType) || PageConfigurationKeys.LINK_RESOURCE_TYPE_CATEGORY.equals(resourceType)) {
                newPageEntity.setPublished(true);
            } else {
                // A link to another page inherits that page's published state and visibility.
                Optional<Page> optionalRelatedPage = pageRepository.findById(content);
                if (optionalRelatedPage.isPresent()) {
                    Page relatedPage = optionalRelatedPage.get();
                    checkLinkRelatedPageType(relatedPage);
                    newPageEntity.setPublished(relatedPage.isPublished());
                    newPageEntity.setVisibility(Visibility.valueOf(relatedPage.getVisibility()));
                }
            }
        }
        if (PageType.SWAGGER == newPageType || PageType.MARKDOWN == newPageType) {
            checkMarkdownOrSwaggerConsistency(newPageEntity, newPageType);
            createRevision = true;
        }
        Page page = convert(newPageEntity);
        if (page.getSource() != null) {
            fetchPage(page);
        }
        page.setId(id);
        // Without an api id the page is attached to the environment instead.
        if (StringUtils.isEmpty(apiId)) {
            page.setReferenceId(environmentId);
            page.setReferenceType(PageReferenceType.ENVIRONMENT);
        } else {
            page.setReferenceId(apiId);
            page.setReferenceType(PageReferenceType.API);
        }
        // Set date fields.
        page.setCreatedAt(new Date());
        page.setUpdatedAt(page.getCreatedAt());
        List<String> messages = validateSafeContent(page);
        Page createdPage = this.pageRepository.create(page);
        if (createRevision) {
            createPageRevision(createdPage);
        }
        // Only one homepage is allowed.
        onlyOneHomepage(page);
        createAuditLog(PageReferenceType.API.equals(page.getReferenceType()) ? page.getReferenceId() : null, PAGE_CREATED, page.getCreatedAt(), null, page);
        PageEntity pageEntity = convert(createdPage);
        // Surface any content-sanitization warnings to the caller.
        if (messages != null && !messages.isEmpty()) {
            pageEntity.setMessages(messages);
        }
        // Add the document to the search engine index.
        index(pageEntity);
        return pageEntity;
    } catch (TechnicalException | FetcherException ex) {
        logger.error("An error occurs while trying to create {}", newPageEntity, ex);
        throw new TechnicalManagementException("An error occurs while trying create " + newPageEntity, ex);
    }
}
Example use of org.apache.commons.lang3.StringUtils.isEmpty in the project pact-jvm by DiUS: class PactBrokerLoader, method loadPactsForProvider.
/**
 * Loads all consumer pacts for the given provider from the pact broker,
 * optionally restricted to a broker tag.
 *
 * @param providerName the provider whose pacts should be fetched
 * @param tag          optional tag; when empty, all consumers are fetched
 * @return the pacts published by each consumer of the provider
 * @throws IOException when the broker URL is invalid, or when no pacts were
 *         found and failIfNoPactsFound is enabled
 */
private List<Pact> loadPactsForProvider(final String providerName, final String tag) throws IOException {
    LOGGER.debug("Loading pacts from pact broker for provider " + providerName + " and tag " + tag);
    final URIBuilder uriBuilder = new URIBuilder()
        .setScheme(parseExpressions(pactBrokerProtocol))
        .setHost(parseExpressions(pactBrokerHost))
        .setPort(Integer.parseInt(parseExpressions(pactBrokerPort)));
    try {
        final PactBrokerClient pactBrokerClient = newPactBrokerClient(uriBuilder.build());
        // An empty tag means "all consumers"; otherwise restrict to the tagged ones.
        final List<ConsumerInfo> consumers = StringUtils.isEmpty(tag)
            ? pactBrokerClient.fetchConsumers(providerName)
            : pactBrokerClient.fetchConsumersWithTag(providerName, tag);
        if (failIfNoPactsFound && consumers.isEmpty()) {
            throw new NoPactsFoundException("No consumer pacts were found for provider '" + providerName + "' and tag '" + tag + "'. (URL " + pactBrokerClient.getUrlForProvider(providerName, tag) + ")");
        }
        return consumers.stream().map(consumer -> this.loadPact(consumer, pactBrokerClient.getOptions())).collect(toList());
    } catch (URISyntaxException e) {
        throw new IOException("Was not able load pacts from broker as the broker URL was invalid", e);
    }
}
Example use of org.apache.commons.lang3.StringUtils.isEmpty in the project kie-wb-common by kiegroup: class BpmnFileIndexer, method buildProcessDefinition.
/**
 * Parses the given BPMN2 XML content and returns the process-data listener attached
 * to each compiled process definition.
 *
 * @param bpmn2Content      the BPMN2 XML source; may be null or empty
 * @param moduleClassLoader optional class loader used to resolve module classes during the build
 * @return one BpmnProcessDataEventListener per parsed process; an empty list when the
 *         content is empty or fails to parse
 */
private List<BpmnProcessDataEventListener> buildProcessDefinition(String bpmn2Content, ClassLoader moduleClassLoader) throws IllegalArgumentException {
    if (StringUtils.isEmpty(bpmn2Content)) {
        return Collections.emptyList();
    }
    // Set class loader: use the module's loader when available so module classes resolve.
    final KnowledgeBuilder kbuilder;
    if (moduleClassLoader != null) {
        KnowledgeBuilderConfigurationImpl pconf = new KnowledgeBuilderConfigurationImpl(moduleClassLoader);
        kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(pconf);
    } else {
        kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    }
    // Build. Encode explicitly as UTF-8: the previous bare getBytes() used the
    // platform-default charset, making BPMN2 parsing depend on the JVM environment.
    kbuilder.add(new ByteArrayResource(bpmn2Content.getBytes(java.nio.charset.StandardCharsets.UTF_8)), ResourceType.BPMN2);
    if (kbuilder.hasErrors()) {
        for (KnowledgeBuilderError error : kbuilder.getErrors()) {
            logger.error("Error: {}", error.getMessage());
        }
        logger.debug("Process Cannot be Parsed! \n {} \n", bpmn2Content);
        return Collections.emptyList();
    }
    // Retrieve the ProcessInfoHolder each process registered under
    // BpmnProcessDataEventListener.NAME in its metadata.
    List<BpmnProcessDataEventListener> processDataList = new ArrayList<>();
    kbuilder.getKnowledgePackages().forEach(pkg -> pkg.getProcesses().forEach(p -> {
        BpmnProcessDataEventListener processData = (BpmnProcessDataEventListener) p.getMetaData().get(BpmnProcessDataEventListener.NAME);
        processDataList.add(processData);
    }));
    return processDataList;
}
Example use of org.apache.commons.lang3.StringUtils.isEmpty in the project nifi by apache: class PutElasticsearchHttp, method onTrigger.
/**
 * Batches up to BATCH_SIZE incoming flow files into a single Elasticsearch _bulk
 * request (index/update/upsert/delete operations), sends it over HTTP, then routes
 * each flow file to success, failure or retry based on the per-item results in the
 * bulk response. Invalid flow files (missing index, unsupported operation, missing
 * id for non-index operations) are routed to failure before the request is sent.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
final int batchSize = context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger();
final List<FlowFile> flowFiles = session.get(batchSize);
// Nothing queued: yield this trigger without doing any work.
if (flowFiles.isEmpty()) {
return;
}
// Name of the flow file attribute that carries the document id (may be null).
final String id_attribute = context.getProperty(ID_ATTRIBUTE).getValue();
// Authentication
final String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();
OkHttpClient okHttpClient = getClient();
final ComponentLog logger = getLogger();
// Keep track of the list of flow files that need to be transferred. As they are transferred, remove them from the list.
List<FlowFile> flowFilesToTransfer = new LinkedList<>(flowFiles);
// Accumulates the newline-delimited _bulk request body for the whole batch.
final StringBuilder sb = new StringBuilder();
final String baseUrl = trimToEmpty(context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
HttpUrl.Builder urlBuilder = HttpUrl.parse(baseUrl).newBuilder().addPathSegment("_bulk");
// Find the user-added properties and set them as query parameters on the URL
for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
PropertyDescriptor pd = property.getKey();
if (pd.isDynamic()) {
if (property.getValue() != null) {
urlBuilder = urlBuilder.addQueryParameter(pd.getName(), context.getProperty(pd).evaluateAttributeExpressions().getValue());
}
}
}
final URL url = urlBuilder.build().url();
// Validate each flow file and append its bulk operation line(s); invalid ones
// go straight to failure and are removed from the transfer list.
for (FlowFile file : flowFiles) {
final String index = context.getProperty(INDEX).evaluateAttributeExpressions(file).getValue();
final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(file).getValue());
if (StringUtils.isEmpty(index)) {
logger.error("No value for index in for {}, transferring to failure", new Object[] { id_attribute, file });
flowFilesToTransfer.remove(file);
session.transfer(file, REL_FAILURE);
continue;
}
final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(file).getValue();
String indexOp = context.getProperty(INDEX_OP).evaluateAttributeExpressions(file).getValue();
if (StringUtils.isEmpty(indexOp)) {
logger.error("No Index operation specified for {}, transferring to failure.", new Object[] { file });
flowFilesToTransfer.remove(file);
session.transfer(file, REL_FAILURE);
continue;
}
// Only the four bulk operations below are supported.
switch(indexOp.toLowerCase()) {
case "index":
case "update":
case "upsert":
case "delete":
break;
default:
logger.error("Index operation {} not supported for {}, transferring to failure.", new Object[] { indexOp, file });
flowFilesToTransfer.remove(file);
session.transfer(file, REL_FAILURE);
continue;
}
final String id = (id_attribute != null) ? file.getAttribute(id_attribute) : null;
// a missing ID indicates one is to be auto-generated by Elasticsearch
if (id == null && !indexOp.equalsIgnoreCase("index")) {
logger.error("Index operation {} requires a valid identifier value from a flow file attribute, transferring to failure.", new Object[] { indexOp, file });
flowFilesToTransfer.remove(file);
session.transfer(file, REL_FAILURE);
continue;
}
// Read the flow file content and flatten newlines so the document fits on one
// line of the newline-delimited bulk body.
final StringBuilder json = new StringBuilder();
session.read(file, in -> {
json.append(IOUtils.toString(in, charset).replace("\r\n", " ").replace('\n', ' ').replace('\r', ' '));
});
if (indexOp.equalsIgnoreCase("index")) {
sb.append("{\"index\": { \"_index\": \"");
sb.append(index);
sb.append("\", \"_type\": \"");
sb.append(docType);
sb.append("\"");
// Omit _id entirely to let Elasticsearch auto-generate one.
if (!StringUtils.isEmpty(id)) {
sb.append(", \"_id\": \"");
sb.append(id);
sb.append("\"");
}
sb.append("}}\n");
sb.append(json);
sb.append("\n");
} else if (indexOp.equalsIgnoreCase("upsert") || indexOp.equalsIgnoreCase("update")) {
sb.append("{\"update\": { \"_index\": \"");
sb.append(index);
sb.append("\", \"_type\": \"");
sb.append(docType);
sb.append("\", \"_id\": \"");
sb.append(id);
sb.append("\" }\n");
sb.append("{\"doc\": ");
sb.append(json);
// doc_as_upsert=true turns the update into an upsert.
sb.append(", \"doc_as_upsert\": ");
sb.append(indexOp.equalsIgnoreCase("upsert"));
sb.append(" }\n");
} else if (indexOp.equalsIgnoreCase("delete")) {
sb.append("{\"delete\": { \"_index\": \"");
sb.append(index);
sb.append("\", \"_type\": \"");
sb.append(docType);
sb.append("\", \"_id\": \"");
sb.append(id);
sb.append("\" }\n");
}
}
if (!flowFilesToTransfer.isEmpty()) {
RequestBody requestBody = RequestBody.create(MediaType.parse("application/json"), sb.toString());
final Response getResponse;
try {
getResponse = sendRequestToElasticsearch(okHttpClient, url, username, password, "PUT", requestBody);
} catch (final Exception e) {
// Transport-level failure: penalize and fail the whole remaining batch.
logger.error("Routing to {} due to exception: {}", new Object[] { REL_FAILURE.getName(), e }, e);
flowFilesToTransfer.forEach((flowFileToTransfer) -> {
flowFileToTransfer = session.penalize(flowFileToTransfer);
session.transfer(flowFileToTransfer, REL_FAILURE);
});
flowFilesToTransfer.clear();
return;
}
final int statusCode = getResponse.code();
if (isSuccess(statusCode)) {
ResponseBody responseBody = getResponse.body();
try {
final byte[] bodyBytes = responseBody.bytes();
JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
// A bulk request can succeed at the HTTP level while individual items fail;
// "errors" tells us whether any item failed.
boolean errors = responseJson.get("errors").asBoolean(false);
if (errors) {
ArrayNode itemNodeArray = (ArrayNode) responseJson.get("items");
if (itemNodeArray.size() > 0) {
// All items are returned whether they succeeded or failed, so iterate through the item array
// at the same time as the flow file list, moving each to success or failure accordingly,
// but only keep the first error for logging
String errorReason = null;
// Iterate in reverse so remove(i) does not shift the indices still to visit.
for (int i = itemNodeArray.size() - 1; i >= 0; i--) {
JsonNode itemNode = itemNodeArray.get(i);
if (flowFilesToTransfer.size() > i) {
FlowFile flowFile = flowFilesToTransfer.remove(i);
int status = itemNode.findPath("status").asInt();
if (!isSuccess(status)) {
if (errorReason == null) {
// Use "result" if it is present; this happens for status codes like 404 Not Found, which may not have an error/reason
// NOTE(review): Jackson's findPath() takes a plain field name, not a path
// expression — "//result" (and "//error/reason" below) will never match a
// real field, so both lookups yield an empty string; verify against the
// intended fields ("result", "error"/"reason").
String reason = itemNode.findPath("//result").asText();
if (StringUtils.isEmpty(reason)) {
// If there was no result, we expect an error with a string description in the "reason" field
reason = itemNode.findPath("//error/reason").asText();
}
errorReason = reason;
logger.error("Failed to process {} due to {}, transferring to failure", new Object[] { flowFile, errorReason });
}
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
} else {
session.transfer(flowFile, REL_SUCCESS);
// Record provenance event
session.getProvenanceReporter().send(flowFile, url.toString());
}
}
}
}
}
// Transfer any remaining flowfiles to success
flowFilesToTransfer.forEach(file -> {
session.transfer(file, REL_SUCCESS);
// Record provenance event
session.getProvenanceReporter().send(file, url.toString());
});
} catch (IOException ioe) {
// Something went wrong when parsing the response, log the error and route to failure
logger.error("Error parsing Bulk API response: {}", new Object[] { ioe.getMessage() }, ioe);
session.transfer(flowFilesToTransfer, REL_FAILURE);
context.yield();
}
} else if (statusCode / 100 == 5) {
// 5xx -> RETRY, but a server error might last a while, so yield
logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to retry. This is likely a server problem, yielding...", new Object[] { statusCode, getResponse.message() });
session.transfer(flowFilesToTransfer, REL_RETRY);
context.yield();
} else {
// 1xx, 3xx, 4xx, etc. -> NO RETRY
logger.warn("Elasticsearch returned code {} with message {}, transferring flow file to failure", new Object[] { statusCode, getResponse.message() });
session.transfer(flowFilesToTransfer, REL_FAILURE);
}
getResponse.close();
}
}
Aggregations