Example usage of java.io.SequenceInputStream in the project ovirt-engine by oVirt,
taken from the class SSHDialog, method executeCommand.
/**
 * Executes a command on the remote host, bridging the command's stdin/stdout to the
 * supplied {@code sink} through piped streams. Any optional {@code initial} streams are
 * concatenated (via {@link SequenceInputStream}) ahead of the sink-driven stdin pipe, so
 * their content is sent to the host before the interactive dialog begins.
 *
 * <p>Any output on stderr is treated as a failure: if the remote command also threw, the
 * stderr text is preferred over the exception (which is logged and swallowed), and a
 * {@link RuntimeException} carrying the stderr content is raised instead.
 *
 * @param sink
 *            sink to use; receives the command's stdout pipe and feeds its stdin pipe.
 * @param command
 *            command to execute.
 * @param initial
 *            initial input streams to send to host before dialog begins; may be {@code null}.
 * @throws Exception
 *             if the command fails, produces stderr output, or the streams cannot be set up.
 */
public void executeCommand(Sink sink, String command, InputStream[] initial) throws Exception {
    log.info("SSH execute '{}' '{}'", client.getDisplayHost(), command);
    try (final PipedInputStream pinStdin = new PipedInputStream(BUFFER_SIZE);
            final OutputStream poutStdin = new PipedOutputStream(pinStdin);
            final PipedInputStream pinStdout = new PipedInputStream(BUFFER_SIZE);
            final OutputStream poutStdout = new PipedOutputStream(pinStdout);
            final ByteArrayOutputStream stderr = new ConstraintByteArrayOutputStream(1024)) {
        try {
            // Initial streams (if any) are sent first, then the sink takes over via the pipe.
            List<InputStream> stdinList;
            if (initial == null) {
                stdinList = new LinkedList<>();
            } else {
                stdinList = new LinkedList<>(Arrays.asList(initial));
            }
            stdinList.add(pinStdin);
            // Allow the sink to abort the dialog by closing the SSH client.
            sink.setControl(() -> {
                if (client != null) {
                    client.close();
                }
            });
            sink.setStreams(pinStdout, poutStdin);
            sink.start();
            try {
                client.executeCommand(command, new SequenceInputStream(Collections.enumeration(stdinList)), poutStdout, stderr);
            } catch (Exception e) {
                if (stderr.size() == 0) {
                    throw e;
                }
                // FIX: the original message had no '{}' placeholder, so SLF4J silently
                // dropped the root-cause argument from the log output.
                log.error("Swallowing exception as preferring stderr: {}", ExceptionUtils.getRootCauseMessage(e));
                log.debug("Exception", e);
            } finally {
                // stderr output is fatal even when the command itself did not throw.
                if (stderr.size() > 0) {
                    throw new RuntimeException(String.format("Unexpected error during execution: %1$s", new String(stderr.toByteArray(), StandardCharsets.UTF_8)));
                }
            }
        } catch (Exception e) {
            log.error("SSH error running command {}:'{}': {}", client.getDisplayHost(), command, ExceptionUtils.getRootCauseMessage(e));
            log.debug("Exception", e);
            throw e;
        } finally {
            // Always detach the sink, even on failure, so it does not hold closed pipes.
            sink.stop();
            sink.setStreams(null, null);
        }
    }
    log.debug("execute leave");
}
Example usage of java.io.SequenceInputStream in the project plugin-prov by ligoj,
taken from the class ProvQuoteInstanceResource, method upload.
/**
 * Upload a file of quote in add mode.
 *
 * @param subscription
 *            The subscription identifier, will be used to filter the locations from the associated provider.
 * @param uploadedFile
 *            Instance entries files to import. Currently support only CSV format.
 * @param headers
 *            the CSV header names. When <code>null</code> or empty, the default headers are used.
 * @param headersIncluded
 *            When <code>true</code>, the first line is the headers and the given <code>headers</code> parameter is
 *            ignored. Otherwise the <code>headers</code> parameter is used.
 * @param usage
 *            The optional usage name. When not <code>null</code>, each quote instance will be associated to this
 *            usage.
 * @param ramMultiplier
 *            The multiplier for imported RAM values. Default is 1.
 * @param encoding
 *            CSV encoding. Default is UTF-8.
 * @throws IOException
 *             When the CSV stream cannot be written.
 */
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("{subscription:\\d+}/upload")
public void upload(@PathParam("subscription") final int subscription, @Multipart(value = "csv-file") final InputStream uploadedFile, @Multipart(value = "headers", required = false) final String[] headers, @Multipart(value = "headers-included", required = false) final boolean headersIncluded, @Multipart(value = "usage", required = false) final String usage, @Multipart(value = "memoryUnit", required = false) final Integer ramMultiplier, @Multipart(value = "encoding", required = false) final Integer encoding) throws IOException {
    // Reject invisible subscriptions up-front.
    subscriptionResource.checkVisibleSubscription(subscription).getNode().getId();
    final String charset = ObjectUtils.defaultIfNull(encoding, StandardCharsets.UTF_8.name());

    // Resolve the effective header row and build a Reader covering header + data rows.
    final String[] columns;
    final Reader csvReader;
    if (headersIncluded) {
        // Headers are on the first line of the uploaded content itself.
        final String rawFile = IOUtils.toString(uploadedFile, charset);
        final String firstLine = StringUtils.defaultString(new BufferedReader(new StringReader(rawFile)).readLine(), "");
        columns = firstLine.replace(',', ';').split(";");
        csvReader = new StringReader(rawFile);
    } else {
        // Headers are provided separately; synthesize a header line and prepend it to the stream.
        columns = ArrayUtils.isEmpty(headers) ? DEFAULT_HEADERS : headers;
        final String headerLine = StringUtils.chop(ArrayUtils.toString(columns)).substring(1).replace(',', ';') + "\n";
        csvReader = new InputStreamReader(new SequenceInputStream(new ByteArrayInputStream(headerLine.getBytes(charset)), uploadedFile), charset);
    }

    // Check headers validity, then persist each non-null parsed entry.
    checkHeaders(ACCEPTED_HEADERS, columns);
    csvForBean.toBean(InstanceUpload.class, csvReader).stream().filter(Objects::nonNull).forEach(entry -> persist(entry, subscription, usage, ramMultiplier));
}
Example usage of java.io.SequenceInputStream in the project Orekit by CS-SI,
taken from the class OEMParserTest, method testITRFFrames.
/**
 * Check the parser can parse several ITRF frames. Test case for #361.
 *
 * <p>For each frame name, the OEM test file is assembled from three pieces — a fixed
 * prefix, a generated {@code REF_FRAME} line, and a fixed suffix — stitched together
 * with nested {@link SequenceInputStream}s, then parsed and verified against the
 * expected predefined frame.
 *
 * @throws OrekitException on error.
 */
@Test
public void testITRFFrames() throws OrekitException {
    // setup
    Charset utf8 = StandardCharsets.UTF_8;
    IERSConventions conventions = IERSConventions.IERS_2010;
    boolean simpleEop = true;
    Frame itrf2008 = FramesFactory.getITRF(conventions, simpleEop);
    OEMParser parser = new OEMParser().withSimpleEOP(simpleEop).withConventions(conventions);
    // frames to check: OEM frame string -> expected Orekit frame
    List<Pair<String, Frame>> frames = new ArrayList<>();
    frames.add(new Pair<>("ITRF-93", Predefined.ITRF_2008_TO_ITRF_93.createTransformedITRF(itrf2008, "ITRF93")));
    frames.add(new Pair<>("ITRF-97", Predefined.ITRF_2008_TO_ITRF_97.createTransformedITRF(itrf2008, "ITRF97")));
    frames.add(new Pair<>("ITRF2000", Predefined.ITRF_2008_TO_ITRF_2000.createTransformedITRF(itrf2008, "ITRF2000")));
    frames.add(new Pair<>("ITRF2005", Predefined.ITRF_2008_TO_ITRF_2005.createTransformedITRF(itrf2008, "ITRF2005")));
    frames.add(new Pair<>("ITRF2008", itrf2008));
    for (Pair<String, Frame> frame : frames) {
        final String frameName = frame.getFirst();
        // NOTE(review): these streams are never closed; classpath/byte-array streams make
        // this benign here, but try-with-resources would be cleaner — left as-is because
        // close() would add a checked IOException to the method signature.
        InputStream pre = OEMParserTest.class.getResourceAsStream("/ccsds/OEMExample7.txt.pre");
        InputStream middle = new ByteArrayInputStream(("REF_FRAME = " + frameName).getBytes(utf8));
        InputStream post = OEMParserTest.class.getResourceAsStream("/ccsds/OEMExample7.txt.post");
        InputStream input = new SequenceInputStream(pre, new SequenceInputStream(middle, post));
        // action
        OEMFile actual = parser.parse(input);
        // verify
        EphemeridesBlock actualBlock = actual.getEphemeridesBlocks().get(0);
        // FIX: JUnit's Assert.assertEquals expects (expected, actual); the original calls
        // had the arguments reversed, which produces misleading failure messages.
        Assert.assertEquals(frameName, actualBlock.getFrameString());
        // check expected frame
        Frame actualFrame = actualBlock.getFrame();
        Frame expectedFrame = frame.getSecond();
        Assert.assertEquals(expectedFrame.getName(), actualFrame.getName());
        Assert.assertEquals(expectedFrame.getTransformProvider(), actualFrame.getTransformProvider());
    }
}
Example usage of java.io.SequenceInputStream in the project pravega by pravega,
taken from the class OperationLogTestBase, method getExpectedContents.
/**
 * Given a list of Log Operations, generates an InputStream for each encountered StreamSegment that contains the final
 * contents of that StreamSegment. Only considers operations of type StreamSegmentAppendOperation and MergeSegmentOperation.
 *
 * @param operations the completed operations to reconstruct segment contents from; every
 *                   operation's completion future must already be done.
 * @return a map from StreamSegment id to an InputStream concatenating that segment's final contents.
 */
private AbstractMap<Long, InputStream> getExpectedContents(Collection<OperationWithCompletion> operations) {
    HashMap<Long, List<InputStream>> partialContents = new HashMap<>();
    for (OperationWithCompletion o : operations) {
        Assert.assertTrue("Operation is not completed.", o.completion.isDone());
        if (o.completion.isCompletedExceptionally()) {
            // This is failed operation; it contributed no data, so skip it.
            continue;
        }
        if (o.operation instanceof StreamSegmentAppendOperation) {
            StreamSegmentAppendOperation appendOperation = (StreamSegmentAppendOperation) o.operation;
            // computeIfAbsent replaces the original get/null-check/put boilerplate.
            partialContents.computeIfAbsent(appendOperation.getStreamSegmentId(), id -> new ArrayList<>())
                    .add(appendOperation.getData().getReader());
        } else if (o.operation instanceof MergeSegmentOperation) {
            MergeSegmentOperation mergeOperation = (MergeSegmentOperation) o.operation;
            // NOTE(review): assumes the source segment has recorded contents; a merge of an
            // unknown source would NPE here (same as the original code) — TODO confirm intended.
            List<InputStream> sourceSegmentContents = partialContents.get(mergeOperation.getSourceSegmentId());
            partialContents.computeIfAbsent(mergeOperation.getStreamSegmentId(), id -> new ArrayList<>())
                    .addAll(sourceSegmentContents);
            // The source segment ceases to exist after the merge.
            partialContents.remove(mergeOperation.getSourceSegmentId());
        }
    }
    // Construct final result: concatenate each segment's partial streams in order.
    HashMap<Long, InputStream> result = new HashMap<>();
    for (Map.Entry<Long, List<InputStream>> e : partialContents.entrySet()) {
        // Collections.enumeration over the List replaces Guava's Iterators.asEnumeration;
        // behavior is identical for an in-memory list.
        result.put(e.getKey(), new SequenceInputStream(Collections.enumeration(e.getValue())));
    }
    return result;
}
Example usage of java.io.SequenceInputStream in the project pravega by pravega,
taken from the class AsyncReadResultProcessorTests, method testProcessAll.
/**
 * Tests the {@link AsyncReadResultProcessor#processAll} method.
 *
 * <p>Generates a sequence of entries, serves all but the last as immediate cache reads
 * and the last one as a future read, then verifies that processAll stitches them back
 * together into the original data.
 */
@Test
public void testProcessAll() throws Exception {
    // Pre-generate some entries.
    ArrayList<byte[]> entries = new ArrayList<>();
    int totalLength = generateEntries(entries);

    // Entry provider: serves entries in order, last one as a future read, then null.
    AtomicInteger nextIndex = new AtomicInteger();
    StreamSegmentReadResult.NextEntrySupplier supplier = (offset, length, makeCopy) -> {
        int entryIndex = nextIndex.getAndIncrement();
        if (entryIndex >= entries.size()) {
            // Exhausted: no more entries to serve.
            return null;
        }
        if (entryIndex == entries.size() - 1) {
            // Last entry is served asynchronously as a future read result.
            Supplier<BufferView> contents = () -> new ByteArraySegment(entries.get(entryIndex));
            return new TestFutureReadResultEntry(offset, length, contents, executorService());
        }
        // All other entries are served as immediate cache reads.
        byte[] data = entries.get(entryIndex);
        return new CacheReadResultEntry(offset, data, 0, data.length);
    };

    // Fetch all the data and compare with expected.
    @Cleanup StreamSegmentReadResult rr = new StreamSegmentReadResult(0, totalLength, supplier, "");
    val result = AsyncReadResultProcessor.processAll(rr, executorService(), TIMEOUT);
    val actualData = result.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).getReader();
    val expectedData = new SequenceInputStream(Iterators.asEnumeration(entries.stream().map(ByteArrayInputStream::new).iterator()));
    AssertExtensions.assertStreamEquals("Unexpected data read back.", expectedData, actualData, totalLength);
}
Aggregations