Example 26 with ByteArrayOutputStream

Use of org.apache.commons.io.output.ByteArrayOutputStream in project Gaffer by gchq.

From class ImportKeyValuePairRDDToAccumuloHandlerTest, method checkImportRDDOfElements:

@Test
public void checkImportRDDOfElements() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeSchema.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties"))
            .build();
    final ArrayBuffer<Element> elements = new ArrayBuffer<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(TestGroups.ENTITY);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(TestGroups.EDGE);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty(TestPropertyNames.COUNT, 2);
        final Edge edge2 = new Edge(TestGroups.EDGE);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty(TestPropertyNames.COUNT, 4);
        elements.$plus$eq(edge1);
        elements.$plus$eq(edge2);
        elements.$plus$eq(entity);
    }
    final User user = new User();
    final SparkConf sparkConf = new SparkConf()
            .setMaster("local")
            .setAppName("tests")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sparkContext = new SparkContext(sparkConf);
    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
    final String outputPath = this.getClass().getResource("/").getPath() + "load";
    final String failurePath = this.getClass().getResource("/").getPath() + "failure";
    final File file = new File(outputPath);
    if (file.exists()) {
        FileUtils.forceDelete(file);
    }
    final ElementConverterFunction func = new ElementConverterFunction(sparkContext.broadcast(
            new ByteEntityAccumuloElementConverter(graph1.getSchema()), ACCUMULO_ELEMENT_CONVERTER_CLASS_TAG));
    final RDD<Tuple2<Key, Value>> elementRDD = sparkContext.parallelize(elements, 1, ELEMENT_CLASS_TAG)
            .flatMap(func, TUPLE2_CLASS_TAG);
    final ImportKeyValuePairRDDToAccumulo addRdd = new ImportKeyValuePairRDDToAccumulo.Builder()
            .input(elementRDD)
            .outputPath(outputPath)
            .failurePath(failurePath)
            .build();
    graph1.execute(addRdd, user);
    FileUtils.forceDelete(file);
    // Check all elements were added
    final GetRDDOfAllElements rddQuery = new GetRDDOfAllElements.Builder()
            .sparkContext(sparkContext)
            .option(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString)
            .build();
    final RDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    final Set<Element> results = new HashSet<>();
    final Element[] returnedElements = (Element[]) rdd.collect();
    Collections.addAll(results, returnedElements);
    assertEquals(elements.size(), results.size());
    sparkContext.stop();
}
Also used : Entity(uk.gov.gchq.gaffer.data.element.Entity) User(uk.gov.gchq.gaffer.user.User) Configuration(org.apache.hadoop.conf.Configuration) DataOutputStream(java.io.DataOutputStream) Element(uk.gov.gchq.gaffer.data.element.Element) GetRDDOfAllElements(uk.gov.gchq.gaffer.spark.operation.scalardd.GetRDDOfAllElements) ImportKeyValuePairRDDToAccumulo(uk.gov.gchq.gaffer.sparkaccumulo.operation.scalardd.ImportKeyValuePairRDDToAccumulo) HashSet(java.util.HashSet) ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream) Graph(uk.gov.gchq.gaffer.graph.Graph) SparkContext(org.apache.spark.SparkContext) Tuple2(scala.Tuple2) ArrayBuffer(scala.collection.mutable.ArrayBuffer) ByteEntityAccumuloElementConverter(uk.gov.gchq.gaffer.accumulostore.key.core.impl.byteEntity.ByteEntityAccumuloElementConverter) ElementConverterFunction(uk.gov.gchq.gaffer.sparkaccumulo.operation.utils.scala.ElementConverterFunction) Edge(uk.gov.gchq.gaffer.data.element.Edge) SparkConf(org.apache.spark.SparkConf) File(java.io.File) Test(org.junit.Test)
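
The ByteArrayOutputStream idiom this test turns on is serialising a Hadoop Configuration into a string that can be attached to the later GetRDDOfAllElements operation. A minimal sketch of just that step, assuming only the classes already shown above (the helper class name is invented for illustration):

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;

public final class ConfigurationStrings {
    private ConfigurationStrings() {
    }

    // Serialise a Hadoop Configuration to a UTF-8 string so it can travel
    // with an operation option, as the test above does.
    public static String serialise(final Configuration configuration) throws IOException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        configuration.write(new DataOutputStream(baos));
        return new String(baos.toByteArray(), StandardCharsets.UTF_8);
    }
}

commons-io's ByteArrayOutputStream grows by chaining internal buffers rather than reallocating and copying, but is otherwise interchangeable with java.io.ByteArrayOutputStream here.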

Example 27 with ByteArrayOutputStream

Use of org.apache.commons.io.output.ByteArrayOutputStream in project Gaffer by gchq.

From class ImportRDDOfElementsHandlerTest, method checkImportRDDOfElements:

@Test
public void checkImportRDDOfElements() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties"))
            .build();
    final ArrayBuffer<Element> elements = new ArrayBuffer<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(TestGroups.ENTITY);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(TestGroups.EDGE);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty(TestPropertyNames.COUNT, 2);
        final Edge edge2 = new Edge(TestGroups.EDGE);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty(TestPropertyNames.COUNT, 4);
        elements.$plus$eq(edge1);
        elements.$plus$eq(edge2);
        elements.$plus$eq(entity);
    }
    final User user = new User();
    final SparkConf sparkConf = new SparkConf()
            .setMaster("local")
            .setAppName("tests")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sparkContext = new SparkContext(sparkConf);
    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
    final String outputPath = this.getClass().getResource("/").getPath() + "load";
    final String failurePath = this.getClass().getResource("/").getPath() + "failure";
    final File file = new File(outputPath);
    if (file.exists()) {
        FileUtils.forceDelete(file);
    }
    final RDD<Element> elementRDD = sparkContext.parallelize(elements, 8, ELEMENT_CLASS_TAG);
    final ImportRDDOfElements addRdd = new ImportRDDOfElements.Builder()
            .sparkContext(sparkContext)
            .input(elementRDD)
            .option("outputPath", outputPath)
            .option("failurePath", failurePath)
            .build();
    graph1.execute(addRdd, user);
    FileUtils.forceDelete(file);
    // Check all elements were added
    final GetRDDOfAllElements rddQuery = new GetRDDOfAllElements.Builder()
            .sparkContext(sparkContext)
            .option(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString)
            .build();
    final RDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    final Set<Element> results = new HashSet<>();
    final Element[] returnedElements = (Element[]) rdd.collect();
    for (final Element returnedElement : returnedElements) {
        results.add(returnedElement);
    }
    assertEquals(elements.size(), results.size());
    sparkContext.stop();
}
Also used : Entity(uk.gov.gchq.gaffer.data.element.Entity) User(uk.gov.gchq.gaffer.user.User) Configuration(org.apache.hadoop.conf.Configuration) DataOutputStream(java.io.DataOutputStream) Element(uk.gov.gchq.gaffer.data.element.Element) ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream) Graph(uk.gov.gchq.gaffer.graph.Graph) SparkContext(org.apache.spark.SparkContext) ImportRDDOfElements(uk.gov.gchq.gaffer.spark.operation.scalardd.ImportRDDOfElements) GetRDDOfAllElements(uk.gov.gchq.gaffer.spark.operation.scalardd.GetRDDOfAllElements) ArrayBuffer(scala.collection.mutable.ArrayBuffer) Edge(uk.gov.gchq.gaffer.data.element.Edge) SparkConf(org.apache.spark.SparkConf) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test)
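
Examples 26 and 27 repeat the same SparkContext setup verbatim. A hypothetical helper extracting it (the class and method names are invented; the settings are copied from the tests):

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;

public final class TestSparkContexts {
    private TestSparkContexts() {
    }

    // Local single-JVM context with Gaffer's Kryo registrator, matching the
    // configuration both tests above build inline.
    public static SparkContext localKryoContext() {
        final SparkConf sparkConf = new SparkConf()
                .setMaster("local")
                .setAppName("tests")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
                .set("spark.driver.allowMultipleContexts", "true");
        return new SparkContext(sparkConf);
    }
}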

Example 28 with ByteArrayOutputStream

Use of org.apache.commons.io.output.ByteArrayOutputStream in project gocd by gocd.

From class GoFileConfigDataSource, method encryptPasswords:

private void encryptPasswords(File configFile) throws Exception {
    String currentContent = FileUtils.readFileToString(configFile);
    GoConfigHolder configHolder = magicalGoConfigXmlLoader.loadConfigHolder(currentContent);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    magicalGoConfigXmlWriter.write(configHolder.configForEdit, stream, true);
    String postEncryptContent = new String(stream.toByteArray());
    if (!currentContent.equals(postEncryptContent)) {
        LOGGER.debug("[Encrypt] Writing config to file");
        FileUtils.writeStringToFile(configFile, postEncryptContent);
    }
}
Also used : ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream)
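
The pattern here is render-then-diff: the re-encrypted config is written to an in-memory stream, and the file on disk is rewritten only when the content actually changed. A sketch of that pattern, assuming a hypothetical ContentRenderer in place of magicalGoConfigXmlWriter, with an explicit UTF-8 charset where the original relies on the platform default:

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;

public final class WriteIfChanged {
    private WriteIfChanged() {
    }

    // Hypothetical stand-in for magicalGoConfigXmlWriter.write(...).
    public interface ContentRenderer {
        void renderTo(OutputStream out) throws IOException;
    }

    // Render the new content in memory, then touch the file only on a diff,
    // mirroring encryptPasswords above.
    public static void write(final File file, final ContentRenderer renderer) throws IOException {
        final String current = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
        final ByteArrayOutputStream stream = new ByteArrayOutputStream();
        renderer.renderTo(stream);
        final String updated = new String(stream.toByteArray(), StandardCharsets.UTF_8);
        if (!current.equals(updated)) {
            FileUtils.writeStringToFile(file, updated, StandardCharsets.UTF_8);
        }
    }
}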

Example 29 with ByteArrayOutputStream

Use of org.apache.commons.io.output.ByteArrayOutputStream in project gocd by gocd.

From class GoFileConfigDataSource, method configAsXml:

public String configAsXml(CruiseConfig config, boolean skipPreprocessingAndValidation) throws Exception {
    LOGGER.debug("[Config Save] === Converting config to XML");
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    magicalGoConfigXmlWriter.write(config, outputStream, skipPreprocessingAndValidation);
    LOGGER.debug("[Config Save] === Done converting config to XML");
    return outputStream.toString();
}
Also used : ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream)
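
One caveat: outputStream.toString() decodes the buffer with the platform default charset. A sketch of the same capture step pinned to UTF-8, with a hypothetical XmlWriter interface standing in for magicalGoConfigXmlWriter:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.output.ByteArrayOutputStream;

public final class XmlCapture {
    private XmlCapture() {
    }

    // Hypothetical stand-in for magicalGoConfigXmlWriter.write(...).
    public interface XmlWriter {
        void write(OutputStream out) throws IOException;
    }

    // Capture the writer's output in memory and decode it with an explicit
    // charset instead of the platform default that toString() uses.
    public static String capture(final XmlWriter writer) throws IOException {
        final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        writer.write(outputStream);
        return new String(outputStream.toByteArray(), StandardCharsets.UTF_8);
    }
}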

Example 30 with ByteArrayOutputStream

Use of org.apache.commons.io.output.ByteArrayOutputStream in project nhin-d by DirectProject.

From class NotificationTest, method testFailedNotification:

public void testFailedNotification() throws Exception {
    Notification noti = new Notification(NotificationType.Failed);
    System.out.println(noti);
    MimeMultipart mm = noti.getAsMultipart();
    assertNotNull(mm);
    assertEquals(2, mm.getCount());
    BodyPart part = mm.getBodyPart(0);
    assertTrue(part.getContentType().startsWith("text/plain"));
    assertEquals(Notification.DefaultExplanationFailed, part.getContent().toString());
    part = mm.getBodyPart(1);
    assertTrue(part.getContentType().startsWith("message/disposition-notification"));
    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    part.writeTo(outStream);
    String content = new String(outStream.toByteArray());
    assertTrue(content.contains("automatic-action/MDN-sent-automatically;failed"));
}
Also used : BodyPart(javax.mail.BodyPart) MimeMultipart(javax.mail.internet.MimeMultipart) ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream) DispositionNotification(com.sun.mail.dsn.DispositionNotification)
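
Part.writeTo serialises the body part's headers and content to its wire format, which is what the content assertion inspects. A minimal sketch of that capture step (the helper class is hypothetical, and UTF-8 is an assumption; the test decodes with the platform default):

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import javax.mail.BodyPart;
import javax.mail.MessagingException;

import org.apache.commons.io.output.ByteArrayOutputStream;

public final class MimeParts {
    private MimeParts() {
    }

    // Render a MIME body part (headers plus content) to a string so its
    // disposition fields can be asserted on, as the test above does.
    public static String asString(final BodyPart part) throws IOException, MessagingException {
        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        part.writeTo(outStream);
        return new String(outStream.toByteArray(), StandardCharsets.UTF_8);
    }
}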

Aggregations

ByteArrayOutputStream (org.apache.commons.io.output.ByteArrayOutputStream): 80
Test (org.junit.Test): 36
DataOutputStream (java.io.DataOutputStream): 15
IOException (java.io.IOException): 14
HashSet (java.util.HashSet): 13
ArrayList (java.util.ArrayList): 12
Configuration (org.apache.hadoop.conf.Configuration): 12
ByteArrayInputStream (java.io.ByteArrayInputStream): 11
SparkConf (org.apache.spark.SparkConf): 10
Edge (uk.gov.gchq.gaffer.data.element.Edge): 10
Element (uk.gov.gchq.gaffer.data.element.Element): 10
Entity (uk.gov.gchq.gaffer.data.element.Entity): 10
Graph (uk.gov.gchq.gaffer.graph.Graph): 10
User (uk.gov.gchq.gaffer.user.User): 10
PrintStream (java.io.PrintStream): 9
HashMap (java.util.HashMap): 9
File (java.io.File): 8
InputStream (java.io.InputStream): 6
OutputStream (java.io.OutputStream): 6
JavaSparkContext (org.apache.spark.api.java.JavaSparkContext): 6