use of java.util.Collections in project nifi by apache.
the class SnippetUtils method populateFlowSnippet.
/**
* Populates the specified snippet and returns the details.
*
* @param snippet snippet
* @param recurse recurse
* @param includeControllerServices whether or not to include controller services in the flow snippet dto
* @param removeInstanceId whether instance ids should be removed when building the snippet dto
* @return snippet
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public FlowSnippetDTO populateFlowSnippet(final Snippet snippet, final boolean recurse, final boolean includeControllerServices, boolean removeInstanceId) {
final FlowSnippetDTO snippetDto = new FlowSnippetDTO(removeInstanceId);
final String groupId = snippet.getParentGroupId();
final ProcessGroup processGroup = flowController.getGroup(groupId);
// ensure the group could be found
if (processGroup == null) {
throw new IllegalStateException("The parent process group for this snippet could not be found.");
}
// We need to ensure that the Controller Services that are added get added to the proper group.
// This can potentially get a little bit tricky. Consider this scenario:
// We have a Process Group G1. Within Process Group G1 is a Controller Service C1.
// Also within G1 is a child Process Group, G2. Within G2 is a child Process Group, G3.
// Within G3 are two child Process Groups: G4 and G5. Within each of these children,
// we have a Processor (P1, P2) that references the Controller Service C1, defined 3 levels above.
// Now, we create a template that encompasses only Process Groups G4 and G5. We need to ensure
// that the Controller Service C1 is included at the 'root' of the template so that those
// Processors within G4 and G5 both have access to the same Controller Service. This can be drawn
// out thus:
//
// G1 -- C1
// |
// |
// G2
// |
// |
// G3
// |  \
// |   \
// G4   G5
// |    |
// |    |
// P1   P2
//
// Both P1 and P2 reference C1.
//
// In order to accomplish this, we maintain two collections. First, we keep a Set of all Controller Services that have
// been added. If we add a new Controller Service to the set, then we know it hasn't been added anywhere in the Snippet.
// In that case, we determine the service's group ID. In the flow described above, if we template just groups G4 and G5,
// then we need to include the Controller Service defined at G1. So we also keep a Map of Group ID to controller services
// in that group. If the ParentGroupId of a Controller Service is not in our snippet, then we instead update the parent
// ParentGroupId to be that of our highest-level process group (in this case G3, as that's where the template is created)
// and then add the controller services to that group (NOTE: here, when we say we change the group ID and add to that group,
// we are talking only about the DTO objects that make up the snippet. We do not actually modify the Process Group or the
// Controller Services in our flow themselves!)
final Set<ControllerServiceDTO> allServicesReferenced = new HashSet<>();
final Map<String, FlowSnippetDTO> contentsByGroup = new HashMap<>();
contentsByGroup.put(processGroup.getIdentifier(), snippetDto);
// add any processors
final Set<ControllerServiceDTO> controllerServices = new HashSet<>();
final Set<ProcessorDTO> processors = new LinkedHashSet<>();
if (!snippet.getProcessors().isEmpty()) {
for (final String processorId : snippet.getProcessors().keySet()) {
final ProcessorNode processor = processGroup.getProcessor(processorId);
if (processor == null) {
throw new IllegalStateException("A processor in this snippet could not be found.");
}
processors.add(dtoFactory.createProcessorDto(processor));
if (includeControllerServices) {
// Include all referenced services that are not already included in this snippet.
getControllerServices(processor.getProperties()).stream().filter(svc -> allServicesReferenced.add(svc)).forEach(svc -> {
final String svcGroupId = svc.getParentGroupId();
final String destinationGroupId = contentsByGroup.containsKey(svcGroupId) ? svcGroupId : processGroup.getIdentifier();
svc.setParentGroupId(destinationGroupId);
controllerServices.add(svc);
});
}
}
}
// add any connections
final Set<ConnectionDTO> connections = new LinkedHashSet<>();
if (!snippet.getConnections().isEmpty()) {
for (final String connectionId : snippet.getConnections().keySet()) {
final Connection connection = processGroup.getConnection(connectionId);
if (connection == null) {
throw new IllegalStateException("A connection in this snippet could not be found.");
}
connections.add(dtoFactory.createConnectionDto(connection));
}
}
// add any funnels
final Set<FunnelDTO> funnels = new LinkedHashSet<>();
if (!snippet.getFunnels().isEmpty()) {
for (final String funnelId : snippet.getFunnels().keySet()) {
final Funnel funnel = processGroup.getFunnel(funnelId);
if (funnel == null) {
throw new IllegalStateException("A funnel in this snippet could not be found.");
}
funnels.add(dtoFactory.createFunnelDto(funnel));
}
}
// add any input ports
final Set<PortDTO> inputPorts = new LinkedHashSet<>();
if (!snippet.getInputPorts().isEmpty()) {
for (final String inputPortId : snippet.getInputPorts().keySet()) {
final Port inputPort = processGroup.getInputPort(inputPortId);
if (inputPort == null) {
throw new IllegalStateException("An input port in this snippet could not be found.");
}
inputPorts.add(dtoFactory.createPortDto(inputPort));
}
}
// add any labels
final Set<LabelDTO> labels = new LinkedHashSet<>();
if (!snippet.getLabels().isEmpty()) {
for (final String labelId : snippet.getLabels().keySet()) {
final Label label = processGroup.getLabel(labelId);
if (label == null) {
throw new IllegalStateException("A label in this snippet could not be found.");
}
labels.add(dtoFactory.createLabelDto(label));
}
}
// add any output ports
final Set<PortDTO> outputPorts = new LinkedHashSet<>();
if (!snippet.getOutputPorts().isEmpty()) {
for (final String outputPortId : snippet.getOutputPorts().keySet()) {
final Port outputPort = processGroup.getOutputPort(outputPortId);
if (outputPort == null) {
throw new IllegalStateException("An output port in this snippet could not be found.");
}
outputPorts.add(dtoFactory.createPortDto(outputPort));
}
}
// add any process groups
final Set<ProcessGroupDTO> processGroups = new LinkedHashSet<>();
if (!snippet.getProcessGroups().isEmpty()) {
for (final String childGroupId : snippet.getProcessGroups().keySet()) {
final ProcessGroup childGroup = processGroup.getProcessGroup(childGroupId);
if (childGroup == null) {
throw new IllegalStateException("A process group in this snippet could not be found.");
}
final ProcessGroupDTO childGroupDto = dtoFactory.createProcessGroupDto(childGroup, recurse);
processGroups.add(childGroupDto);
// maintain a listing of visited groups starting with each group in the snippet. this is used to determine
// whether a referenced controller service should be included in the resulting snippet. if the service is
// defined at groupId or one of its ancestors, it's considered outside of this snippet and will only be included
// when the includeControllerServices flag is set to true. this happens above when considering the processors in this snippet
final Set<String> visitedGroupIds = new HashSet<>();
addControllerServices(childGroup, childGroupDto, allServicesReferenced, includeControllerServices, visitedGroupIds, contentsByGroup, processGroup.getIdentifier());
}
}
// add any remote process groups
final Set<RemoteProcessGroupDTO> remoteProcessGroups = new LinkedHashSet<>();
if (!snippet.getRemoteProcessGroups().isEmpty()) {
for (final String remoteProcessGroupId : snippet.getRemoteProcessGroups().keySet()) {
final RemoteProcessGroup remoteProcessGroup = processGroup.getRemoteProcessGroup(remoteProcessGroupId);
if (remoteProcessGroup == null) {
throw new IllegalStateException("A remote process group in this snippet could not be found.");
}
remoteProcessGroups.add(dtoFactory.createRemoteProcessGroupDto(remoteProcessGroup));
}
}
// Normalize the coordinates based on the locations of the other components
final List<? extends ComponentDTO> components = new ArrayList<>();
components.addAll((Set) processors);
components.addAll((Set) connections);
components.addAll((Set) funnels);
components.addAll((Set) inputPorts);
components.addAll((Set) labels);
components.addAll((Set) outputPorts);
components.addAll((Set) processGroups);
components.addAll((Set) remoteProcessGroups);
normalizeCoordinates(components);
Set<ControllerServiceDTO> updatedControllerServices = snippetDto.getControllerServices();
if (updatedControllerServices == null) {
updatedControllerServices = new HashSet<>();
}
updatedControllerServices.addAll(controllerServices);
snippetDto.setControllerServices(updatedControllerServices);
snippetDto.setProcessors(processors);
snippetDto.setConnections(connections);
snippetDto.setFunnels(funnels);
snippetDto.setInputPorts(inputPorts);
snippetDto.setLabels(labels);
snippetDto.setOutputPorts(outputPorts);
snippetDto.setProcessGroups(processGroups);
snippetDto.setRemoteProcessGroups(remoteProcessGroups);
return snippetDto;
}
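The long comment inside populateFlowSnippet is the real substance of the method, so a compact restatement may help. The sketch below only illustrates that hoisting rule under simplified assumptions: ServiceRef is a hypothetical stand-in for ControllerServiceDTO, and collectServices is not a NiFi API.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustration of the controller-service hoisting rule described above.
public class ReparentSketch {

    static final class ServiceRef {
        final String id;
        String parentGroupId;
        ServiceRef(String id, String parentGroupId) {
            this.id = id;
            this.parentGroupId = parentGroupId;
        }
    }

    // Services referenced more than once are added only once (the 'seen' set),
    // and a service defined outside the snippet is re-parented onto the
    // top-level group of the snippet. Only these DTO copies are touched,
    // never the live flow.
    static Set<ServiceRef> collectServices(List<ServiceRef> referenced,
                                           Set<String> groupIdsInSnippet,
                                           String topLevelGroupId) {
        Set<String> seen = new HashSet<>();
        Set<ServiceRef> result = new HashSet<>();
        for (ServiceRef svc : referenced) {
            if (!seen.add(svc.id)) {
                continue; // already included somewhere in the snippet
            }
            if (!groupIdsInSnippet.contains(svc.parentGroupId)) {
                svc.parentGroupId = topLevelGroupId; // hoist to the snippet root
            }
            result.add(svc);
        }
        return result;
    }

    public static void main(String[] args) {
        // C1 lives in G1, which is not part of the snippet (only G4 and G5 are),
        // so its copy ends up attached to the top-level group G3.
        Set<String> snippetGroups = new HashSet<>(List.of("G4", "G5"));
        ServiceRef c1 = new ServiceRef("C1", "G1");
        collectServices(List.of(c1, c1), snippetGroups, "G3");
        System.out.println(c1.parentGroupId); // G3
    }
}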
use of java.util.Collections in project Gargoyle by callakrsos.
the class DimList method list.
/********************************
* Created : 2017. 4. 24.  Author : KYJ
*
* Lists the child configuration items under the given repository path.
*
* @param projSpec project specification used to resolve the Dimensions project
* @param path repository folder path to list
* @param fileName optional file name filter; ignored when empty
* @param revision revision to match, or "-1" for the latest revision
* @param convert function mapping each ItemRevision to the result type
* @param exceptionHandler invoked with any exception raised during the lookup
* @return converted child items, or an empty list if the lookup fails
********************************/
public <T> List<T> list(String projSpec, String path, String fileName, String revision, Function<ItemRevision, T> convert, Consumer<Exception> exceptionHandler) {
List<T> collections = Collections.emptyList();
DimensionsConnection conn = null;
try {
conn = getConnection();
Project projObj = getProject(conn, projSpec);
RepositoryFolder findRepositoryFolderByPath = projObj.findRepositoryFolderByPath(path);
Filter filter = new Filter();
if (ValueUtil.isNotEmpty(fileName))
filter.criteria().add(new Filter.Criterion(SystemAttributes.ITEMFILE_FILENAME, fileName, Filter.Criterion.EQUALS));
if (ValueUtil.equals("-1", revision)) {
filter.criteria().add(new Filter.Criterion(SystemAttributes.IS_LATEST_REV, "Y", 0));
} else {
filter.criteria().add(new Filter.Criterion(SystemAttributes.REVISION, revision, Filter.Criterion.EQUALS));
}
List allChildFolders = findRepositoryFolderByPath.getAllChildFolders();
List<DimensionsRelatedObject> childItems = findRepositoryFolderByPath.getChildItems(filter);
// Stream.concat(allChildFolders, childItems);
List<ItemRevision> collect = childItems.stream().map(i -> (ItemRevision) i.getObject()).collect(Collectors.toList());
collections = collect.stream().map(convert).collect(Collectors.toList());
} catch (Exception e) {
exceptionHandler.accept(e);
} finally {
manager.close(conn);
}
return collections;
}
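The java.util.Collections usage here is the "empty list as a safe default" idiom: the result starts as Collections.emptyList(), failures are delegated to the caller's Consumer<Exception>, and the method never returns null. Below is a minimal, dependency-free sketch of that shape, assuming nothing about the Dimensions API; SafeList and getOrEmpty are hypothetical names, not part of the project.

import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;

public final class SafeList {

    public static <T> List<T> getOrEmpty(Supplier<List<T>> query, Consumer<Exception> onError) {
        List<T> result = Collections.emptyList();   // safe default if the query fails
        try {
            result = query.get();
        } catch (Exception e) {
            onError.accept(e);                      // caller-supplied handler, as in DimList#list
        }
        return result;
    }

    public static void main(String[] args) {
        List<String> names = getOrEmpty(
                () -> { throw new IllegalStateException("connection failed"); },
                e -> System.err.println("lookup failed: " + e.getMessage()));
        System.out.println(names.isEmpty());        // true: the empty-list default is returned
    }
}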
use of java.util.Collections in project ORCID-Source by ORCID.
the class SalesForceManagerImplTest method testUpdateContact2.
@Test
public void testUpdateContact2() {
// Switch from main to technical contact
Contact contact = new Contact();
contact.setId("contact2Id");
contact.setAccountId("account1Id");
ContactRole role = new ContactRole(ContactRoleType.TECHNICAL_CONTACT);
role.setId("contact2Idrole1Id");
contact.setRole(role);
((SalesForceManagerImpl) salesForceManager).updateContact(contact, Collections.<Contact>emptyList());
verify(salesForceDao, times(1)).updateContactRole(argThat(r -> {
return "contact2Idrole1Id".equals(r.getId()) && "contact2Id".equals(r.getContactId()) && ContactRoleType.MAIN_CONTACT.equals(r.getRoleType()) && !r.isCurrent();
}));
verify(salesForceDao, times(1)).createContactRole(argThat(r -> {
return "contact2Id".equals(r.getContactId()) && "account1Id".equals(r.getAccountId()) && ContactRoleType.TECHNICAL_CONTACT.equals(r.getRoleType());
}));
}
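Collections.<Contact>emptyList() uses an explicit type witness so the element type of the empty list is pinned to Contact at the call site. A tiny standalone illustration of the same idiom follows; printSize is a hypothetical helper used only for this example.

import java.util.Collections;
import java.util.List;

public class EmptyListWitness {
    public static void main(String[] args) {
        // Target typing lets the compiler infer the element type here:
        List<String> inferred = Collections.emptyList();

        // When the call is an argument whose element type the compiler cannot
        // (or, on older javac versions, could not) infer, the explicit witness
        // is used, as in updateContact(contact, Collections.<Contact>emptyList()):
        printSize(Collections.<String>emptyList());
        System.out.println(inferred.size()); // 0
    }

    static void printSize(List<String> list) {
        System.out.println(list.size()); // 0
    }
}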
use of java.util.Collections in project dataverse by IQSS.
the class OrcidOAuth2AP method getNodes.
private List<Node> getNodes(Node node, List<String> path) {
NodeList childs = node.getChildNodes();
final Stream<Node> nodeStream = IntStream.range(0, childs.getLength()).mapToObj(childs::item).filter(n -> n.getNodeName().equals(path.get(0)));
if (path.size() == 1) {
// accumulate and return mode
return nodeStream.collect(Collectors.toList());
} else {
// dig-in mode.
return nodeStream.findFirst().map(n -> getNodes(n, path.subList(1, path.size()))).orElse(Collections.<Node>emptyList());
}
}
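A self-contained variant of this helper is restated below only to show the Collections.<Node>emptyList() fallback when a path segment has no match; NodePathDemo is not part of dataverse, and the example document is made up.

import java.io.StringReader;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class NodePathDemo {

    static List<Node> getNodes(Node node, List<String> path) {
        NodeList children = node.getChildNodes();
        Stream<Node> matching = IntStream.range(0, children.getLength())
                .mapToObj(children::item)
                .filter(n -> n.getNodeName().equals(path.get(0)));
        if (path.size() == 1) {
            return matching.collect(Collectors.toList());
        }
        // Descend into the first match; an empty list is the "not found" result.
        return matching.findFirst()
                .map(n -> getNodes(n, path.subList(1, path.size())))
                .orElse(Collections.<Node>emptyList());
    }

    public static void main(String[] args) throws Exception {
        String xml = "<person><name><given>Ada</given></name></person>";
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));
        Node root = doc.getDocumentElement();

        System.out.println(getNodes(root, List.of("name", "given")).size());   // 1
        System.out.println(getNodes(root, List.of("name", "family")).size());  // 0: empty-list fallback
    }
}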
use of java.util.Collections in project meghanada-server by mopemope.
the class ASMReflector method reflectAll.
private List<MemberDescriptor> reflectAll(final File file, final String targetClass, final List<String> targetClasses) throws IOException {
if (ModuleHelper.isJrtFsFile(file)) {
final List<MemberDescriptor> results = new ArrayList<>(64);
ModuleHelper.walkModule(path -> {
ModuleHelper.pathToClassData(path).ifPresent(cd -> {
String className = cd.getClassName();
String moduleName = cd.getModuleName();
if (this.ignorePackage(className)) {
return;
}
final Iterator<String> classIterator = targetClasses.iterator();
while (classIterator.hasNext()) {
final String nameWithTP = classIterator.next();
if (nonNull(nameWithTP)) {
final boolean isSuper = !targetClass.equals(nameWithTP);
final String nameWithoutTP = ClassNameUtils.removeTypeParameter(nameWithTP);
if (className.equals(nameWithoutTP)) {
try (final InputStream in = cd.getInputStream()) {
final ClassReader classReader = new ClassReader(in);
final List<MemberDescriptor> members = getMemberFromJar(file, classReader, nameWithoutTP, nameWithTP);
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
results.addAll(members);
classIterator.remove();
break;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
final String innerClassName = ClassNameUtils.replaceInnerMark(className);
if (innerClassName.equals(nameWithoutTP)) {
try (final InputStream in = cd.getInputStream()) {
final ClassReader classReader = new ClassReader(in);
final List<MemberDescriptor> members = this.getMemberFromJar(file, classReader, innerClassName, nameWithTP);
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
results.addAll(members);
classIterator.remove();
break;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
}
});
});
return results;
} else if (file.isFile() && file.getName().endsWith(".jar")) {
try (final JarFile jarFile = new JarFile(file)) {
final Enumeration<JarEntry> entries = jarFile.entries();
final List<MemberDescriptor> results = new ArrayList<>(64);
while (entries.hasMoreElements()) {
if (targetClasses.isEmpty()) {
break;
}
final JarEntry jarEntry = entries.nextElement();
final String entryName = jarEntry.getName();
if (!entryName.endsWith(".class")) {
continue;
}
final String className = ClassNameUtils.replaceSlash(entryName.substring(0, entryName.length() - 6));
if (this.ignorePackage(className)) {
continue;
}
final Iterator<String> classIterator = targetClasses.iterator();
while (classIterator.hasNext()) {
final String nameWithTP = classIterator.next();
if (nonNull(nameWithTP)) {
final boolean isSuper = !targetClass.equals(nameWithTP);
final String nameWithoutTP = ClassNameUtils.removeTypeParameter(nameWithTP);
if (className.equals(nameWithoutTP)) {
try (final InputStream in = jarFile.getInputStream(jarEntry)) {
final ClassReader classReader = new ClassReader(in);
final List<MemberDescriptor> members = this.getMemberFromJar(file, classReader, nameWithoutTP, nameWithTP);
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
results.addAll(members);
classIterator.remove();
break;
}
}
final String innerClassName = ClassNameUtils.replaceInnerMark(className);
if (innerClassName.equals(nameWithoutTP)) {
try (final InputStream in = jarFile.getInputStream(jarEntry)) {
final ClassReader classReader = new ClassReader(in);
final List<MemberDescriptor> members = this.getMemberFromJar(file, classReader, innerClassName, nameWithTP);
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
results.addAll(members);
classIterator.remove();
break;
}
}
}
}
}
return results;
}
} else if (file.isFile() && file.getName().endsWith(".class")) {
for (String nameWithTP : targetClasses) {
final boolean isSuper = !targetClass.equals(nameWithTP);
final String fqcn = ClassNameUtils.removeTypeParameter(nameWithTP);
final List<MemberDescriptor> members = getMembersFromClassFile(file, file, fqcn, false);
if (nonNull(members)) {
// 1 file
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
return members;
}
}
return Collections.emptyList();
} else if (file.isDirectory()) {
try (final Stream<Path> pathStream = Files.walk(file.toPath());
final Stream<File> stream = pathStream.map(Path::toFile).filter(f -> f.isFile() && f.getName().endsWith(".class")).collect(Collectors.toList()).stream()) {
return stream.map(wrapIO(f -> {
final String rootPath = file.getCanonicalPath();
final String path = f.getCanonicalPath();
final String className = ClassNameUtils.replaceSlash(path.substring(rootPath.length() + 1, path.length() - 6));
final Iterator<String> stringIterator = targetClasses.iterator();
while (stringIterator.hasNext()) {
final String nameWithTP = stringIterator.next();
final boolean isSuper = !targetClass.equals(nameWithTP);
final String fqcn = ClassNameUtils.removeTypeParameter(nameWithTP);
if (!className.equals(fqcn)) {
continue;
}
final List<MemberDescriptor> members = getMembersFromClassFile(file, f, fqcn, false);
if (nonNull(members)) {
if (isSuper) {
replaceDescriptorsType(nameWithTP, members);
}
// found
stringIterator.remove();
return members;
}
}
return Collections.<MemberDescriptor>emptyList();
})).filter(memberDescriptors -> nonNull(memberDescriptors) && memberDescriptors.size() > 0).flatMap(Collection::stream).collect(Collectors.toList());
}
}
return Collections.emptyList();
}
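Every branch of reflectAll (jrt image, jar, single class file, directory tree) repeats the same bookkeeping: walk the remaining targetClasses with an explicit Iterator, remove a name as soon as its members have been collected, and fall back to Collections.emptyList() when nothing matches. A compressed, dependency-free sketch of that loop follows; the names and the lookup map are illustrative, not meghanada APIs.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class ResolveOnceSketch {

    // 'lookup' stands in for reading members out of a jar entry or class file;
    // 'targets' must be mutable because resolved names are removed from it.
    static List<String> resolveAll(List<String> targets, Map<String, List<String>> lookup) {
        List<String> results = new ArrayList<>();
        for (Map.Entry<String, List<String>> entry : lookup.entrySet()) {
            if (targets.isEmpty()) {
                break;                                   // everything resolved, stop scanning
            }
            Iterator<String> it = targets.iterator();
            while (it.hasNext()) {
                String target = it.next();
                if (entry.getKey().equals(target)) {
                    results.addAll(entry.getValue());
                    it.remove();                         // never resolve the same target twice
                    break;
                }
            }
        }
        // mirror reflectAll's Collections.emptyList() fallback
        return results.isEmpty() ? Collections.emptyList() : results;
    }

    public static void main(String[] args) {
        List<String> targets = new ArrayList<>(List.of("java.lang.String", "com.example.Missing"));
        Map<String, List<String>> lookup = Map.of("java.lang.String", List.of("length()", "charAt(int)"));
        System.out.println(resolveAll(targets, lookup)); // [length(), charAt(int)]
        System.out.println(targets);                     // [com.example.Missing] remains unresolved
    }
}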