Use of org.eclipse.ceylon.aether.apache.maven.model.io.xpp3.MavenXpp3Reader in project tycho (by eclipse):
class TychoVersionsPluginCompatibilityTest, method invokeVersionsPluginOnTycho0120Project.
/**
 * <p>
 * This test verifies that current and future versions of the tycho-versions-plugin can be
 * executed on a project that is built with Tycho 0.12.0. With this assertion it's possible to
 * call the plugin without version on the commandline:
 * </p>
 * <p>
 * <code>mvn org.eclipse.tycho:tycho-versions-plugin:set-version</code>
 * </p>
 * <p>
 * Background: The tycho-versions-plugin 0.12.0 can't handle projects that are built with Tycho
 * 0.11.0 or older, see <a href="https://bugs.eclipse.org/bugs/show_bug.cgi?id=363791">Bug
 * 363791</a>.
 * </p>
 */
@Test
public void invokeVersionsPluginOnTycho0120Project() throws Exception {
    String expectedNewVersion = "1.2.3";
    Verifier verifier = getVerifier("TychoVersionsPluginTest", true);
    verifier.getCliOptions().add("-DnewVersion=" + expectedNewVersion);
    verifier.executeGoal("org.eclipse.tycho:tycho-versions-plugin:" + TychoVersion.getTychoVersion() + ":set-version");
    verifier.verifyErrorFreeLog();
    // Parse the rewritten pom.xml; use try-with-resources so the FileReader is
    // closed deterministically (the original version leaked it).
    MavenXpp3Reader pomReader = new MavenXpp3Reader();
    Model pomModel;
    try (FileReader pomFileReader = new FileReader(new File(verifier.getBasedir(), "pom.xml"))) {
        pomModel = pomReader.read(pomFileReader);
    }
    assertEquals("<version> in pom.xml has not been changed!", expectedNewVersion, pomModel.getVersion());
}
Use of org.eclipse.ceylon.aether.apache.maven.model.io.xpp3.MavenXpp3Reader in project jib (by google):
class TestPlugin, method before.
/**
 * Installs the plugin under test into the local repository and records its version.
 *
 * @throws IOException if pom.xml cannot be read
 * @throws XmlPullParserException if pom.xml cannot be parsed
 * @throws VerificationException if the Maven install build fails
 */
@Override
protected void before() throws IOException, XmlPullParserException, VerificationException {
    // Installs the plugin for use in tests.
    Verifier verifier = new Verifier(".", true);
    verifier.setAutoclean(false);
    verifier.addCliOption("-DskipTests");
    verifier.executeGoal("install");
    // Reads the project version. Close the reader via try-with-resources —
    // the original version leaked the BufferedReader.
    MavenXpp3Reader reader = new MavenXpp3Reader();
    try (java.io.Reader pomReader = Files.newBufferedReader(Paths.get("pom.xml"), StandardCharsets.UTF_8)) {
        Model model = reader.read(pomReader);
        pluginVersion = model.getVersion();
    }
}
Use of org.eclipse.ceylon.aether.apache.maven.model.io.xpp3.MavenXpp3Reader in project mule (by mulesoft):
class MavenModelFactory, method createMavenProject.
/**
 * Creates a {@link Model} by reading the {@code pom.xml} file.
 *
 * @param pomFile to parse and read the model
 * @return {@link Model} representing the Maven project from pom file.
 * @throws IllegalArgumentException if {@code pomFile} is null or does not exist
 * @throws RuntimeException if the pom file exists but cannot be read or parsed
 */
public static Model createMavenProject(File pomFile) {
    MavenXpp3Reader mavenReader = new MavenXpp3Reader();
    if (pomFile != null && pomFile.exists()) {
        try (FileReader reader = new FileReader(pomFile)) {
            Model model = mavenReader.read(reader);
            model.setPomFile(pomFile);
            return model;
        } catch (Exception e) {
            // Preserve the original exception as the cause so parse/IO failures
            // remain diagnosable (the original dropped it).
            throw new RuntimeException("Couldn't get Maven Artifact from pom: " + pomFile, e);
        }
    }
    throw new IllegalArgumentException("pom file doesn't exist for path: " + pomFile);
}
Use of org.eclipse.ceylon.aether.apache.maven.model.io.xpp3.MavenXpp3Reader in project ceylon (by eclipse):
class AetherResolverImpl, method findExtension.
/**
 * Reads the {@code <packaging>} value from the given pom file.
 *
 * @param pomFile the pom.xml to inspect; may be null or nonexistent
 * @return the packaging string, or null if the file is absent, unreadable,
 *         or not parseable as a Maven model (best-effort lookup)
 */
private String findExtension(File pomFile) {
    if (pomFile != null && pomFile.exists()) {
        MavenXpp3Reader reader = new MavenXpp3Reader();
        try (FileReader fileReader = new FileReader(pomFile)) {
            Model model = reader.read(fileReader);
            return model.getPackaging();
        } catch (XmlPullParserException | IOException e) {
            // Deliberately best-effort: an unparseable pom yields "no packaging known".
            return null;
        }
    }
    // (removed a stray empty statement ";" that followed the if-block)
    return null;
}
Use of org.eclipse.ceylon.aether.apache.maven.model.io.xpp3.MavenXpp3Reader in project zeppelin (by apache):
class SparkIntegrationTest, method testInterpreterBasics.
/**
 * Exercises the Spark interpreter family (Scala, PySpark, IPySpark, SQL, R)
 * end-to-end: configures jars/packages, then asserts each interpreter returns
 * SUCCESS for representative snippets.
 *
 * @throws IOException if pom.xml cannot be read
 * @throws InterpreterException if an interpreter invocation fails
 * @throws XmlPullParserException if pom.xml cannot be parsed
 */
private void testInterpreterBasics() throws IOException, InterpreterException, XmlPullParserException {
    // add jars & packages for testing
    InterpreterSetting sparkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
    sparkInterpreterSetting.setProperty("spark.jars.packages", "com.maxmind.geoip2:geoip2:2.5.0");
    sparkInterpreterSetting.setProperty("SPARK_PRINT_LAUNCH_COMMAND", "true");
    sparkInterpreterSetting.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
    // Read the project version from pom.xml; close the FileReader via
    // try-with-resources (the original version leaked it).
    MavenXpp3Reader reader = new MavenXpp3Reader();
    Model model;
    try (FileReader pomReader = new FileReader("pom.xml")) {
        model = reader.read(pomReader);
    }
    sparkInterpreterSetting.setProperty("spark.jars", new File("target/zeppelin-interpreter-integration-" + model.getVersion() + ".jar").getAbsolutePath());
    // test SparkInterpreter
    Interpreter sparkInterpreter = interpreterFactory.getInterpreter("spark.spark", new ExecutionContext("user1", "note1", "test"));
    InterpreterContext context = new InterpreterContext.Builder().setNoteId("note1").setParagraphId("paragraph_1").build();
    InterpreterResult interpreterResult = sparkInterpreter.interpret("sc.version", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    String detectedSparkVersion = interpreterResult.message().get(0).getData();
    assertTrue(detectedSparkVersion + " doesn't contain " + this.sparkVersion, detectedSparkVersion.contains(this.sparkVersion));
    interpreterResult = sparkInterpreter.interpret("sc.range(1,10).sum()", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    // sc.range(1,10) is the half-open range [1, 10), whose sum is 45
    assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("45"));
    interpreterResult = sparkInterpreter.interpret("sc.getConf.get(\"spark.user.name\")", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("user1"));
    // test jars & packages can be loaded correctly
    interpreterResult = sparkInterpreter.interpret("import org.apache.zeppelin.interpreter.integration.DummyClass\n" + "import com.maxmind.geoip2._", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    // test PySparkInterpreter
    Interpreter pySparkInterpreter = interpreterFactory.getInterpreter("spark.pyspark", new ExecutionContext("user1", "note1", "test"));
    interpreterResult = pySparkInterpreter.interpret("sqlContext.createDataFrame([(1,'a'),(2,'b')], ['id','name']).registerTempTable('test')", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    // test IPySparkInterpreter
    Interpreter ipySparkInterpreter = interpreterFactory.getInterpreter("spark.ipyspark", new ExecutionContext("user1", "note1", "test"));
    interpreterResult = ipySparkInterpreter.interpret("sqlContext.table('test').show()", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    // test SparkSQLInterpreter
    Interpreter sqlInterpreter = interpreterFactory.getInterpreter("spark.sql", new ExecutionContext("user1", "note1", "test"));
    interpreterResult = sqlInterpreter.interpret("select count(1) as c from test", context);
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    assertEquals(interpreterResult.toString(), InterpreterResult.Type.TABLE, interpreterResult.message().get(0).getType());
    assertEquals(interpreterResult.toString(), "c\n2\n", interpreterResult.message().get(0).getData());
    // test SparkRInterpreter
    Interpreter sparkrInterpreter = interpreterFactory.getInterpreter("spark.r", new ExecutionContext("user1", "note1", "test"));
    // Spark 2/3 create DataFrames via as.DataFrame; older Spark needs the sqlContext form
    if (isSpark2() || isSpark3()) {
        interpreterResult = sparkrInterpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", context);
    } else {
        interpreterResult = sparkrInterpreter.interpret("df <- createDataFrame(sqlContext, faithful)\nhead(df)", context);
    }
    assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    assertEquals(interpreterResult.toString(), InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType());
    assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("eruptions waiting"));
}
Aggregations