Use of com.google.common.io.ByteSource in project druid by druid-io.
From class AzureTaskLogsTest, method testStreamTaskLogWithNegative:
@Test
public void testStreamTaskLogWithNegative() throws Exception {
    final String testLog = "hello this is a log";
    final String blobPath = prefix + "/" + taskid + "/log";
    expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
    expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
    expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
    replayAll();

    // A negative offset requests the last N bytes of the log.
    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, -3);

    final StringWriter writer = new StringWriter();
    IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
    Assert.assertEquals(testLog.substring(testLog.length() - 3), writer.toString());

    verifyAll();
}
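The pattern under test is a ByteSource whose openStream() starts at the tail of the underlying bytes. A minimal standalone sketch of that idiom, assuming only plain Guava and none of Druid's classes (the TailSource class and tail helper are illustrative names, not Druid's API):

import com.google.common.io.ByteSource;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class TailSource {
    // Expose only the last `count` bytes of `data` as a ByteSource.
    static ByteSource tail(final byte[] data, final int count) {
        final int start = Math.max(0, data.length - count);
        return new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                InputStream in = new ByteArrayInputStream(data);
                in.skip(start); // ByteArrayInputStream.skip always skips within bounds
                return in;
            }
        };
    }

    public static void main(String[] args) throws IOException {
        byte[] log = "hello this is a log".getBytes(StandardCharsets.UTF_8);
        // Prints "log": the last 3 bytes, mirroring streamTaskLog(taskid, -3) above.
        System.out.println(new String(tail(log, 3).read(), StandardCharsets.UTF_8));
    }
}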
Use of com.google.common.io.ByteSource in project druid by druid-io.
From class HadoopConverterJobTest, method setUp:
@Before
public void setUp() throws Exception {
    final MetadataStorageUpdaterJobSpec metadataStorageUpdaterJobSpec = new MetadataStorageUpdaterJobSpec() {
        @Override
        public String getSegmentTable() {
            return derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
        }

        @Override
        public MetadataStorageConnectorConfig get() {
            return derbyConnectorRule.getMetadataConnectorConfig();
        }
    };
    final File scratchFileDir = temporaryFolder.newFolder();
    storageLocProperty = System.getProperty(STORAGE_PROPERTY_KEY);
    tmpSegmentDir = temporaryFolder.newFolder();
    System.setProperty(STORAGE_PROPERTY_KEY, tmpSegmentDir.getAbsolutePath());
    final URL url = Preconditions.checkNotNull(Query.class.getClassLoader().getResource("druid.sample.tsv"));
    final File tmpInputFile = temporaryFolder.newFile();
    FileUtils.retryCopy(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            return url.openStream();
        }
    }, tmpInputFile, FileUtils.IS_EXCEPTION, 3);
    final HadoopDruidIndexerConfig hadoopDruidIndexerConfig = new HadoopDruidIndexerConfig(
        new HadoopIngestionSpec(
            new DataSchema(
                DATASOURCE,
                HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                    new StringInputRowParser(
                        new DelimitedParseSpec(
                            new TimestampSpec("ts", "iso", null),
                            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList(TestIndex.DIMENSIONS)), null, null),
                            "\t",
                            "",
                            Arrays.asList(TestIndex.COLUMNS)
                        ),
                        null
                    ),
                    Map.class
                ),
                new AggregatorFactory[] {
                    new DoubleSumAggregatorFactory(TestIndex.METRICS[0], TestIndex.METRICS[0]),
                    new HyperUniquesAggregatorFactory("quality_uniques", "quality")
                },
                new UniformGranularitySpec(Granularities.MONTH, Granularities.DAY, ImmutableList.<Interval>of(interval)),
                HadoopDruidIndexerConfig.JSON_MAPPER
            ),
            new HadoopIOConfig(
                ImmutableMap.<String, Object>of("type", "static", "paths", tmpInputFile.getAbsolutePath()),
                metadataStorageUpdaterJobSpec,
                tmpSegmentDir.getAbsolutePath()
            ),
            new HadoopTuningConfig(
                scratchFileDir.getAbsolutePath(),
                null, null, null, null, null,
                false, false, false, false,
                null, false, false, null, null, null, false, false
            )
        )
    );
    metadataStorageTablesConfigSupplier = derbyConnectorRule.metadataTablesConfigSupplier();
    connector = derbyConnectorRule.getConnector();
    try {
        connector.getDBI().withHandle(new HandleCallback<Void>() {
            @Override
            public Void withHandle(Handle handle) throws Exception {
                handle.execute("DROP TABLE druid_segments");
                return null;
            }
        });
    } catch (CallbackFailedException e) {
        // Who cares
    }
    List<Jobby> jobs = ImmutableList.of(
        new Jobby() {
            @Override
            public boolean run() {
                connector.createSegmentTable(metadataStorageUpdaterJobSpec.getSegmentTable());
                return true;
            }
        },
        new HadoopDruidDetermineConfigurationJob(hadoopDruidIndexerConfig),
        new HadoopDruidIndexerJob(hadoopDruidIndexerConfig, new SQLMetadataStorageUpdaterJobHandler(connector))
    );
    JobHelper.runJobs(jobs, hadoopDruidIndexerConfig);
}
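Druid's FileUtils.retryCopy wraps the copy in retry logic; the copy itself can also be expressed with Guava's stock ByteSource adapters. A minimal sketch without the retry wrapper (the ResourceCopy class name and temp-file names are illustrative):

import com.google.common.io.Files;
import com.google.common.io.Resources;
import java.io.File;
import java.io.IOException;
import java.net.URL;

public class ResourceCopy {
    public static void main(String[] args) throws IOException {
        // Resources.asByteSource adapts a URL to a ByteSource, replacing the
        // anonymous ByteSource subclass passed to retryCopy above.
        URL url = ResourceCopy.class.getClassLoader().getResource("druid.sample.tsv");
        File tmpInputFile = File.createTempFile("druid-sample", ".tsv");
        Resources.asByteSource(url).copyTo(Files.asByteSink(tmpInputFile));
    }
}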
Use of com.google.common.io.ByteSource in project keywhiz by square.
From class FileAssetServlet, method doGet:
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    try {
        ByteSource asset = loadAsset(req.getRequestURI());
        if (asset == null) {
            resp.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        final String mimeTypeOfExtension = req.getServletContext().getMimeType(req.getRequestURI());
        MediaType mediaType = DEFAULT_MEDIA_TYPE;
        if (mimeTypeOfExtension != null) {
            try {
                mediaType = MediaType.parse(mimeTypeOfExtension);
                if (mediaType.is(MediaType.ANY_TEXT_TYPE)) {
                    mediaType = mediaType.withCharset(DEFAULT_CHARSET);
                }
            } catch (IllegalArgumentException ignore) {
                // Unparseable MIME type; fall back to DEFAULT_MEDIA_TYPE.
            }
        }
        resp.setContentType(mediaType.type() + "/" + mediaType.subtype());
        if (mediaType.charset().isPresent()) {
            resp.setCharacterEncoding(mediaType.charset().get().toString());
        }
        try (OutputStream output = resp.getOutputStream()) {
            asset.copyTo(output);
        }
    } catch (RuntimeException ignored) {
        resp.sendError(HttpServletResponse.SC_NOT_FOUND);
    }
}
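The ByteSource work in this servlet reduces to asset.copyTo(output). A minimal sketch of a file-backed variant, assuming loadAsset ultimately wraps a file (the AssetCopy class and serve helper are illustrative; the URI-to-file mapping is omitted):

import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;

public class AssetCopy {
    // Adapt a file to a ByteSource and stream it to a response OutputStream.
    static void serve(File file, OutputStream output) throws IOException {
        ByteSource asset = Files.asByteSource(file);
        asset.copyTo(output); // opens the source, copies all bytes, closes it
    }
}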
Use of com.google.common.io.ByteSource in project keywhiz by square.
From class FileAssetServletTest, method loadsIndex:
@Test
public void loadsIndex() throws Exception {
    File folder = tempDir.newFolder("loadsIndexTest");
    File indexFile = tempDir.newFile("loadsIndexTest/index.html");
    Files.write("loadsIndexContent", indexFile, UTF_8);

    FileAssetServlet servlet = new FileAssetServlet(folder, "/ui/", "index.html");
    ByteSource byteSource = servlet.loadAsset("/ui/");
    assertThat(byteSource.read()).isEqualTo(Files.toByteArray(indexFile));
}
Use of com.google.common.io.ByteSource in project keywhiz by square.
From class FileAssetServletTest, method loadsAsset:
@Test
public void loadsAsset() throws Exception {
    File folder = tempDir.newFolder("loadsAssetTest");
    File assetFile = tempDir.newFile("loadsAssetTest/asset.txt");
    Files.write("loadsAssetContent", assetFile, UTF_8);

    FileAssetServlet servlet = new FileAssetServlet(folder, "/ui/", "index.html");
    ByteSource byteSource = servlet.loadAsset("/ui/asset.txt");
    assertThat(byteSource.read()).isEqualTo(Files.toByteArray(assetFile));
}
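Both tests rely on the same equivalence: for a file-backed ByteSource, read() returns the same bytes as Files.toByteArray on the underlying file. A minimal sketch of that check (the ReadCheck class and temp-file name are illustrative):

import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class ReadCheck {
    public static void main(String[] args) throws IOException {
        File assetFile = File.createTempFile("asset", ".txt");
        Files.write("loadsAssetContent", assetFile, StandardCharsets.UTF_8);
        byte[] viaSource = Files.asByteSource(assetFile).read();
        // Prints "true": read() and toByteArray agree on the file contents.
        System.out.println(Arrays.equals(viaSource, Files.toByteArray(assetFile)));
    }
}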