Use of java.io.PipedOutputStream in project nanohttpd by NanoHttpd.
From the class TestHttpServer, method setUp:
@BeforeClass
public static void setUp() throws Exception {
    stdIn = new PipedOutputStream();
    System.setIn(new PipedInputStream(stdIn));
    serverStartThread = new Thread(new Runnable() {

        @Override
        public void run() {
            String[] args = { "--host", "localhost", "--port", "9090", "--dir", "src/test/resources" };
            SimpleWebServer.main(args);
        }
    });
    serverStartThread.start();
    // give the server some time to start.
    Thread.sleep(100);
}
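The pipe gives the test a writable handle on System.in: something can later push a newline into stdIn so that SimpleWebServer.main, which blocks reading from the console, returns and the server shuts down. A minimal companion tearDown in that spirit might look like the following (a sketch only, assuming the server's main loop exits once it reads a line from standard input, as the next example demonstrates for EchoSocketSample):

@AfterClass
public static void tearDown() throws Exception {
    // Feed a line into the pipe; System.setIn above makes it arrive on
    // System.in, which the server's read loop treats as a stop signal.
    stdIn.write(System.getProperty("line.separator").getBytes());
    serverStartThread.join(2000);
}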
Use of java.io.PipedOutputStream in project nanohttpd by NanoHttpd.
From the class EchoWebSocketsTest, method testDirectoryArgument:
@Test
public void testDirectoryArgument() throws IOException, InterruptedException {
    final String testPort = "9458";
    PipedOutputStream stdIn = new PipedOutputStream();
    System.setIn(new PipedInputStream(stdIn));
    Thread testServer = new Thread(new Runnable() {

        @Override
        public void run() {
            String[] args = { testPort, "-d" };
            try {
                EchoSocketSample.main(args);
            } catch (IOException e) {
                fail("Exception: " + e.getMessage());
            }
        }
    });
    testServer.start();
    Thread.sleep(1000);
    stdIn.write(System.getProperty("line.separator").getBytes());
    testServer.join(1000);
    assertFalse("Test server failed to close", testServer.isAlive());
}
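Both NanoHttpd tests rely on the same trick: a PipedInputStream wrapped around a PipedOutputStream is installed via System.setIn, so the test can type into the program's standard input programmatically. A self-contained sketch of just that mechanism, with the class name and payload invented for illustration:

import java.io.*;

public class FakeStdinDemo {

    public static void main(String[] args) throws IOException {
        PipedOutputStream stdIn = new PipedOutputStream();
        System.setIn(new PipedInputStream(stdIn));
        // Whatever is written to stdIn now becomes readable from System.in.
        stdIn.write("quit\n".getBytes());
        BufferedReader console = new BufferedReader(new InputStreamReader(System.in));
        // Prints: read from stdin: quit
        System.out.println("read from stdin: " + console.readLine());
    }
}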
Use of java.io.PipedOutputStream in project dhis2-core by dhis2.
From the class DefaultAdxDataService, method saveDataValueSetInternal:
private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, JobConfiguration id) {
    notifier.clear(id).notify(id, "ADX parsing process started");
    ImportOptions adxImportOptions = firstNonNull(importOptions, ImportOptions.getDefaultImportOptions()).instance().setNotificationLevel(NotificationLevel.OFF);
    // Get import options
    IdScheme dsScheme = importOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme deScheme = importOptions.getIdSchemes().getDataElementIdScheme();
    // Create meta-data maps
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();
    // Get meta-data maps
    IdentifiableObjectCallable<DataSet> dataSetCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dsScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, deScheme, null);
    // Heat cache
    if (importOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dsScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(deScheme));
    }
    XMLReader adxReader = XMLFactory.getXMLReader(in);
    ImportSummary importSummary;
    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // For Async runs, give the DXF import a different notification task ID
    // so it doesn't conflict with notifications from this level.
    JobConfiguration dxfJobId = (id == null) ? null : new JobConfiguration("dxfJob", JobType.DATAVALUE_IMPORT_INTERNAL, id.getUserUid(), true);
    int groupCount = 0;
    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfJobId, pipeOut, sessionFactory));
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);
        List<ImportConflict> adxConflicts = new LinkedList<>();
        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");
        notifier.notify(id, "Starting to import ADX data groups.");
        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);
            // note this returns conflicts which are detected at ADX level
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions, dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));
            groupCount++;
        }
        // end dataValueSet
        dxfWriter.writeEndElement();
        dxfWriter.writeEndDocument();
        pipeOut.flush();
        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        ImportSummary summary = importSummary;
        adxConflicts.forEach(conflict -> summary.addConflict(conflict.getObject(), conflict.getValue()));
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.addConflict(ex.getObject(), ex.getMessage());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    }
    executor.shutdown();
    notifier.update(id, INFO, "ADX data import done", true).addJobSummary(id, importSummary, ImportSummary.class);
    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated() + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());
    return importSummary;
}
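The pipe is what lets parsing and importing overlap: AdxPipedImporter consumes the transformed DXF from the read end of the pipe on the executor thread while this thread writes into pipeOut. A stripped-down sketch of that producer/consumer handoff, with the DHIS2 types replaced by plain streams (class name and payload are illustrative only):

import java.io.*;
import java.util.concurrent.*;

public class PipeHandoffDemo {

    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        PipedOutputStream pipeOut = new PipedOutputStream();
        // Connect the read end before any writes; writing to an
        // unconnected PipedOutputStream throws IOException.
        PipedInputStream pipeIn = new PipedInputStream(pipeOut);
        Future<Integer> byteCount = executor.submit(() -> {
            // Consumer: runs on the executor thread, as AdxPipedImporter does.
            try (InputStream in = pipeIn) {
                int count = 0;
                while (in.read() != -1) {
                    count++;
                }
                return count;
            }
        });
        // Producer: this thread writes; the pipe's internal buffer means the
        // writer blocks whenever it gets too far ahead of the reader.
        try (OutputStream out = pipeOut) {
            out.write("<dataValueSet/>".getBytes());
            out.flush();
        }
        System.out.println("consumer read " + byteCount.get(1, TimeUnit.MINUTES) + " bytes");
        executor.shutdown();
    }
}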
Use of java.io.PipedOutputStream in project netty by netty.
From the class CompactObjectSerializationTest, method testInterfaceSerialization:
@Test
public void testInterfaceSerialization() throws Exception {
    PipedOutputStream pipeOut = new PipedOutputStream();
    PipedInputStream pipeIn = new PipedInputStream(pipeOut);
    CompactObjectOutputStream out = new CompactObjectOutputStream(pipeOut);
    CompactObjectInputStream in = new CompactObjectInputStream(pipeIn, ClassResolvers.cacheDisabled(null));
    out.writeObject(List.class);
    Assertions.assertSame(List.class, in.readObject());
}
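Note that the write and the read here happen on a single thread, which only works because the serialized form of List.class fits within the pipe's buffer (1024 bytes by default); a larger payload would block the write and deadlock the test. The same single-threaded round trip with the JDK's stock object streams instead of Netty's compact variants (a sketch for comparison, not Netty code):

import java.io.*;
import java.util.List;

public class PipedSerializationDemo {

    public static void main(String[] args) throws Exception {
        PipedOutputStream pipeOut = new PipedOutputStream();
        PipedInputStream pipeIn = new PipedInputStream(pipeOut);
        ObjectOutputStream out = new ObjectOutputStream(pipeOut);
        out.writeObject(List.class);
        // Flush so the buffered bytes actually reach the pipe before reading.
        out.flush();
        ObjectInputStream in = new ObjectInputStream(pipeIn);
        // Prints: true
        System.out.println(in.readObject() == List.class);
    }
}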
Use of java.io.PipedOutputStream in project druid by druid-io.
From the class DirectDruidClientTest, method testQueryTimeoutBeforeFuture:
@Test
public void testQueryTimeoutBeforeFuture() throws IOException, InterruptedException {
    SettableFuture<Object> timeoutFuture = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    final String queryId = "timeout-before-future";
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class))).andReturn(timeoutFuture).anyTimes();
    EasyMock.replay(httpClient);
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, System.currentTimeMillis() + 250, "queryId", queryId));
    Sequence results = client.run(QueryPlus.wrap(query));
    // incomplete result set
    PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);
    timeoutFuture.set(in);
    QueryTimeoutException actualException = null;
    try {
        out.write(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\"}"));
        Thread.sleep(250);
        out.write(StringUtils.toUtf8("]"));
        out.close();
        results.toList();
    } catch (QueryTimeoutException e) {
        actualException = e;
    }
    Assert.assertNotNull(actualException);
    Assert.assertEquals("Query timeout", actualException.getErrorCode());
    Assert.assertEquals("url[http://localhost:8080/druid/v2/] timed out", actualException.getMessage());
    Assert.assertEquals(hostName, actualException.getHost());
    EasyMock.verify(httpClient);
}
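Here the pipe stands in for a network response that arrives in pieces: the test hands the read end to the client as the response body, writes an incomplete JSON array, then sleeps past the query's fail time before supplying the closing bracket. A standalone sketch of how a pipe simulates such a trickling stream (no Druid involved; the class name and payload are illustrative):

import java.io.*;

public class SlowStreamDemo {

    public static void main(String[] args) throws Exception {
        PipedInputStream in = new PipedInputStream();
        final PipedOutputStream out = new PipedOutputStream(in);
        Thread writer = new Thread(() -> {
            try {
                out.write("[{\"timestamp\":\"2014-01-01T01:02:03Z\"}".getBytes());
                // While we sleep, any reader of 'in' sees an incomplete document.
                Thread.sleep(250);
                out.write("]".getBytes());
                out.close();
            } catch (IOException | InterruptedException ignored) {
            }
        });
        writer.start();
        // The consumer blocks on read() until the writer supplies more bytes.
        ByteArrayOutputStream received = new ByteArrayOutputStream();
        int b;
        while ((b = in.read()) != -1) {
            received.write(b);
        }
        // Prints the complete JSON array once the writer closes the pipe.
        System.out.println(received.toString());
        writer.join();
    }
}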