Use of org.yaml.snakeyaml.Yaml in project georocket (by georocket) — class GeoRocketCli, method config():
/**
 * Returns the CLI configuration. If the superclass does not already supply
 * one, the configuration is loaded from a file: either the path given on
 * the command line, or the first of {@code georocket.yaml},
 * {@code georocket.yml}, {@code georocket.json} found in the "conf"
 * directory under the GeoRocket home. Default host/port values are then
 * filled in, and command-line host/port arguments override everything.
 * Exits the JVM with status 1 if the file cannot be read or parsed.
 * @return the resolved configuration (never null)
 */
@Override
protected JsonObject config() {
  JsonObject config = super.config();
  if (config == null || config.isEmpty()) {
    // determine which configuration file to load
    File confFile;
    if (confFilePath != null) {
      confFile = new File(confFilePath);
    } else {
      // search the default conf directory, preferring YAML over JSON
      File confDir = new File(geoRocketCliHome, "conf");
      confFile = new File(confDir, "georocket.yaml");
      if (!confFile.exists()) {
        confFile = new File(confDir, "georocket.yml");
        if (!confFile.exists()) {
          confFile = new File(confDir, "georocket.json");
        }
      }
    }
    config = new JsonObject();
    try {
      String confFileStr = FileUtils.readFileToString(confFile, "UTF-8");
      if (confFile.getName().endsWith(".json")) {
        config = new JsonObject(confFileStr);
      } else {
        // NOTE(review): SnakeYAML's loadAs can instantiate arbitrary types
        // when fed untrusted input; the local config file is assumed trusted
        Yaml yaml = new Yaml();
        @SuppressWarnings("unchecked")
        Map<String, Object> m = yaml.loadAs(confFileStr, Map.class);
        // flatten nested YAML maps into dotted keys
        config = JsonUtils.flatten(new JsonObject(m));
      }
    } catch (IOException e) {
      System.err.println("Could not read config file " + confFile + ": " + e.getMessage());
      System.exit(1);
    } catch (DecodeException | org.yaml.snakeyaml.error.YAMLException e) {
      // DecodeException covers malformed JSON; YAMLException covers malformed
      // YAML, which was previously uncaught and crashed the CLI with a raw
      // stack trace instead of this friendly message
      System.err.println("Invalid config file: " + e.getMessage());
      System.exit(1);
    }
    // set default values
    if (!config.containsKey(ConfigConstants.HOST)) {
      config.put(ConfigConstants.HOST, GeoRocketClient.DEFAULT_HOST);
    }
    if (!config.containsKey(ConfigConstants.PORT)) {
      config.put(ConfigConstants.PORT, GeoRocketClient.DEFAULT_PORT);
    }
    // overwrite with values from command line
    if (host != null) {
      config.put(ConfigConstants.HOST, host);
    }
    if (port != null) {
      config.put(ConfigConstants.PORT, port);
    }
    setConfig(config);
  }
  return config;
}
Use of org.yaml.snakeyaml.Yaml in project embulk (by embulk) — class ConfigLoader, method fromPropertiesYamlLiteral():
/**
 * Builds a ConfigSource from the entries of {@code props} whose keys start
 * with {@code keyPrefix}. Each value is parsed as a YAML literal, and dots
 * in the remainder of the key act as nested-map accessors, for example:
 * in.parser.type=csv => {"in": {"parser": {"type": "csv"}}}
 */
public ConfigSource fromPropertiesYamlLiteral(Map<String, String> props, String keyPrefix) {
    ObjectNode root = new ObjectNode(JsonNodeFactory.instance);
    DataSource tree = new DataSourceImpl(model, root);
    Yaml yaml = newYaml();
    for (Map.Entry<String, String> entry : props.entrySet()) {
        String fullKey = entry.getKey();
        if (!fullKey.startsWith(keyPrefix)) {
            continue;
        }
        // TODO exception handling
        JsonNode value = objectToJson(yaml.load(entry.getValue()));
        // handle "." as a map accessor by descending one level per fragment
        // TODO handle "[]" as array index
        String[] path = fullKey.substring(keyPrefix.length()).split("\\.");
        DataSource target = tree;
        for (int i = 0; i < path.length - 1; i++) {
            // TODO exception handling
            target = target.getNestedOrSetEmpty(path[i]);
        }
        target.set(path[path.length - 1], value);
    }
    return new DataSourceImpl(model, root);
}
Use of org.yaml.snakeyaml.Yaml in project mdw-designer (by CenturyLinkCloud) — class PackageConfigurationSection, method updatePackageMetaContent():
/**
 * Writes the property groups edited in this section back into the package
 * meta content, preserving its existing format: JSON (content starts with
 * '{'), YAML (schema version >= 6.1), new-style XML package document, or
 * old-namespace process definition document.
 * @throws XmlException if the XML meta content cannot be parsed
 * @throws JSONException if the JSON meta content cannot be parsed
 * @throws IOException if the meta content cannot be serialized
 */
private void updatePackageMetaContent() throws XmlException, JSONException, IOException {
    if (!workflowPackage.getProject().isOldNamespaces()) {
        if (workflowPackage.getMetaContent() != null && workflowPackage.getMetaContent().trim().startsWith("{")) {
            // JSON meta content
            PackageVO metaPkg = new PackageVO(new JSONObject(workflowPackage.getMetaContent()));
            metaPkg.setAttributes(propertyGroupsToAttributes());
            workflowPackage.setMetaContent(metaPkg.getJson(false).toString(2));
        } else if (workflowPackage.getSchemaVersion() >= DataAccess.schemaVersion61) {
            // YAML meta content (schema 6.1+)
            Yaml yaml = new Yaml();
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) yaml.load(workflowPackage.getMetaContent());
            PackageVO metaPkg = new PackageVO(map);
            metaPkg.setAttributes(propertyGroupsToAttributes());
            workflowPackage.setMetaContent(metaPkg.getJson(false).toString(2));
        } else {
            // legacy XML package document
            PackageDocument pkgDefDoc = null;
            if (workflowPackage.getMetaContent() == null || workflowPackage.getMetaContent().isEmpty()) {
                pkgDefDoc = PackageDocument.Factory.newInstance();
            } else {
                if (workflowPackage.getMetaContent().startsWith("<bpm:package") || workflowPackage.getMetaContent().startsWith("<package")) {
                    pkgDefDoc = PackageDocument.Factory.parse(workflowPackage.getMetaContent());
                } else {
                    // unrecognized content: start from a fresh document
                    pkgDefDoc = PackageDocument.Factory.newInstance();
                }
            }
            if (pkgDefDoc.getPackage() == null)
                pkgDefDoc.addNewPackage();
            if (pkgDefDoc.getPackage().getApplicationProperties() == null)
                pkgDefDoc.getPackage().addNewApplicationProperties();
            pkgDefDoc.getPackage().getApplicationProperties().setPropertyGroupArray(propertyGroups.toArray(new PropertyGroup[0]));
            String procDefStr = pkgDefDoc.xmlText(new XmlOptions().setSavePrettyPrint().setSavePrettyPrintIndent(2));
            workflowPackage.setMetaContent(procDefStr);
        }
    } else {
        // old-namespace process definition document
        ProcessDefinitionDocument procDefDoc;
        if (workflowPackage.getMetaContent() == null || workflowPackage.getMetaContent().isEmpty())
            procDefDoc = ProcessDefinitionDocument.Factory.newInstance();
        else
            procDefDoc = ProcessDefinitionDocument.Factory.parse(workflowPackage.getMetaContent(), Compatibility.namespaceOptions());
        if (procDefDoc.getProcessDefinition() == null)
            procDefDoc.addNewProcessDefinition();
        if (procDefDoc.getProcessDefinition().getApplicationProperties() == null)
            procDefDoc.getProcessDefinition().addNewApplicationProperties();
        procDefDoc.getProcessDefinition().getApplicationProperties().setPropertyGroupArray(propertyGroups.toArray(new PropertyGroup[0]));
        String procDefStr = DesignerCompatibility.getInstance().getOldProcessDefinition(procDefDoc);
        workflowPackage.setMetaContent(procDefStr);
    }
}

/**
 * Flattens the edited property groups into attribute VOs, tagging each
 * attribute with its group name. Returns null if there are no property
 * groups, matching the previous inline behavior in both the JSON and
 * YAML branches (which duplicated this loop verbatim).
 */
private List<AttributeVO> propertyGroupsToAttributes() {
    if (propertyGroups == null)
        return null;
    List<AttributeVO> attributes = new ArrayList<>();
    for (PropertyGroup propGroup : propertyGroups) {
        String group = propGroup.getName();
        for (Property prop : propGroup.getPropertyList()) {
            AttributeVO attribute = new AttributeVO(prop.getName(), prop.getStringValue());
            if (group != null)
                attribute.setAttributeGroup(group);
            attributes.add(attribute);
        }
    }
    return attributes;
}
Use of org.yaml.snakeyaml.Yaml in project mdw-designer (by CenturyLinkCloud) — class PackageConfigurationSection, method getPropertyGroups():
/**
 * Reads the property groups from the package meta content, first loading
 * the meta content on demand (an empty, non-null string signifies it has
 * already been loaded). Supports JSON, YAML (schema 6.1+), new-style XML
 * package documents and old-namespace process definition documents.
 * Returns an empty list if the content is missing or cannot be parsed.
 */
private List<PropertyGroup> getPropertyGroups() {
    if (workflowPackage.getMetaContent() == null && !workflowPackage.isDefaultPackage()) {
        try {
            PackageVO packageVO = workflowPackage.getProject().getDesignerProxy().getDesignerDataAccess().loadPackage(workflowPackage.getId(), false);
            // store "" rather than null so we don't reload next time
            workflowPackage.setMetaContent(packageVO.getMetaContent() == null ? "" : packageVO.getMetaContent());
        } catch (Exception ex) {
            PluginMessages.uiError(getShell(), ex, "Package Config", workflowPackage.getProject());
        }
    }
    if (workflowPackage.getMetaContent() == null || workflowPackage.getMetaContent().trim().length() == 0) {
        return new ArrayList<>();
    } else {
        ApplicationProperties appProps = null;
        try {
            if (workflowPackage.getMetaContent().trim().startsWith("{")) {
                // JSON meta content
                return toPropertyGroups(new PackageVO(new JSONObject(workflowPackage.getMetaContent())));
            } else if (workflowPackage.getSchemaVersion() >= DataAccess.schemaVersion61) {
                // YAML meta content (schema 6.1+)
                Yaml yaml = new Yaml();
                @SuppressWarnings("unchecked")
                Map<String, Object> map = (Map<String, Object>) yaml.load(workflowPackage.getMetaContent());
                return toPropertyGroups(new PackageVO(map));
            } else if (workflowPackage.getMetaContent().startsWith("<bpm:package") || workflowPackage.getMetaContent().startsWith("<package")) {
                PackageDocument pkgDoc = PackageDocument.Factory.parse(workflowPackage.getMetaContent());
                appProps = pkgDoc.getPackage().getApplicationProperties();
            } else {
                ProcessDefinitionDocument procDefDoc = ProcessDefinitionDocument.Factory.parse(workflowPackage.getMetaContent(), Compatibility.namespaceOptions());
                appProps = procDefDoc.getProcessDefinition().getApplicationProperties();
            }
        } catch (Exception ex) {
            PluginMessages.uiError(getShell(), ex, "Package Config", workflowPackage.getProject());
        }
        if (appProps != null && appProps.getPropertyGroupList() != null)
            return appProps.getPropertyGroupList();
        // not found or can't parse
        return new ArrayList<>();
    }
}

/**
 * Converts a package's attributes into XMLBeans property groups, one group
 * per attribute-group name. Previously this loop was duplicated verbatim
 * in both the JSON and YAML branches above.
 */
private List<PropertyGroup> toPropertyGroups(PackageVO metaPkg) {
    List<PropertyGroup> propGroups = new ArrayList<>();
    if (metaPkg.getAttributes() != null) {
        Map<String, List<AttributeVO>> groupedAttrs = metaPkg.getAttributesByGroup();
        for (Map.Entry<String, List<AttributeVO>> group : groupedAttrs.entrySet()) {
            PropertyGroup propGroup = PropertyGroup.Factory.newInstance();
            if (group.getKey() != null)
                propGroup.setName(group.getKey());
            for (AttributeVO groupAttr : group.getValue()) {
                Property prop = propGroup.addNewProperty();
                prop.setName(groupAttr.getAttributeName());
                prop.setStringValue(groupAttr.getAttributeValue());
            }
            propGroups.add(propGroup);
        }
    }
    return propGroups;
}
Use of org.yaml.snakeyaml.Yaml in project streamline (by hortonworks) — class StormTopologyActionsImpl, method deploy():
/**
 * Deploys the given topology to the Storm cluster: bundles the required
 * artifacts into a jar, generates the Flux YAML definition, and runs the
 * 'storm jar' command remotely. Reports progress through the action
 * context and throws if the command exits with a nonzero status,
 * distinguishing the "topology already exists" case.
 */
@Override
public void deploy(TopologyLayout topology, String mavenArtifacts, TopologyActionContext ctx, String asUser) throws Exception {
    ctx.setCurrentAction("Adding artifacts to jar");
    Path jarToDeploy = addArtifactsToJar(getArtifactsLocation(topology));
    ctx.setCurrentAction("Creating Storm topology YAML file");
    String fileName = createYamlFileForDeploy(topology);
    ctx.setCurrentAction("Deploying topology via 'storm jar' command");
    // assemble the full 'storm jar' invocation
    List<String> commands = new ArrayList<>();
    commands.add(stormCliPath);
    commands.add("jar");
    commands.add(jarToDeploy.toString());
    commands.addAll(getExtraJarsArg(topology));
    commands.addAll(getMavenArtifactsRelatedArgs(mavenArtifacts));
    commands.addAll(getNimbusConf());
    commands.addAll(getSecuredClusterConf(asUser));
    commands.add("org.apache.storm.flux.Flux");
    commands.add("--remote");
    commands.add(fileName);
    LOG.info("Deploying Application {}", topology.getName());
    LOG.info(String.join(" ", commands));
    ShellProcessResult result = waitProcessFor(executeShellProcess(commands));
    int exitCode = result.exitValue;
    if (exitCode == 0) {
        return;
    }
    LOG.error("Topology deploy command failed - exit code: {} / output: {}", exitCode, result.stdout);
    // collect only the exception-related lines from the command output
    String errors = Arrays.stream(result.stdout.split("\\n"))
            .filter(line -> line.startsWith("Exception") || line.startsWith("Caused by"))
            .collect(Collectors.joining(", "));
    Matcher matcher = Pattern.compile("Topology with name `(.*)` already exists on cluster").matcher(errors);
    if (matcher.find()) {
        throw new TopologyAlreadyExistsOnCluster(matcher.group(1));
    }
    throw new Exception("Topology could not be deployed successfully: storm deploy command failed with " + errors);
}
Aggregations