Use of io.questdb.griffin.SqlExecutionContextImpl in project invesdwin-context-persistence by subes: class QuestDBPerformanceTest, method testQuestDbPerformance.
/**
 * End-to-end write/read benchmark against an embedded QuestDB (Cairo) instance.
 * Creates a fresh table, bulk-inserts timestamped values with periodic commits,
 * runs the read benchmarks, then drops the table and deletes the working directory.
 *
 * @throws InterruptedException if the periodic progress check is interrupted
 * @throws SqlException if table creation, insertion, or cleanup SQL fails
 * @throws IOException if the working directory cannot be created or deleted
 */
@Test
public void testQuestDbPerformance() throws InterruptedException, SqlException, IOException {
    final File directory = new File(ContextProperties.getCacheDirectory(), QuestDBPerformanceTest.class.getSimpleName());
    // start from a clean slate so leftovers from a previous run cannot skew results
    Files.deleteNative(directory);
    Files.forceMkdir(directory);
    final CairoConfiguration configuration = new DefaultCairoConfiguration(directory.getAbsolutePath());
    final Instant writesStart = new Instant();
    int i = 0;
    // try-with-resources ensures the engine is closed even if an assertion or SQL error occurs
    try (CairoEngine engine = new CairoEngine(configuration)) {
        final SqlExecutionContextImpl ctx = new SqlExecutionContextImpl(engine, 1);
        final LoopInterruptedCheck loopCheck = new LoopInterruptedCheck(Duration.ONE_SECOND);
        try (SqlCompiler compiler = new SqlCompiler(engine)) {
            compiler.compile("create table abc (value long, key timestamp) timestamp(key)", ctx);
            try (TableWriter writer = engine.getWriter(ctx.getCairoSecurityContext(), "abc", "insert")) {
                for (final FDate date : newValues()) {
                    final TableWriter.Row row = writer.newRow(date.millisValue());
                    row.putLong(0, date.millisValue());
                    row.append();
                    i++;
                    // commit in batches; committing every row would dominate the benchmark
                    if (i % FLUSH_INTERVAL == 0) {
                        if (loopCheck.check()) {
                            printProgress("Writes", writesStart, i, VALUES);
                        }
                        writer.commit();
                    }
                }
                // flush the final partial batch
                writer.commit();
            }
            printProgress("WritesFinished", writesStart, VALUES, VALUES);
        }
        readIterator(engine);
        readGet(engine);
        readGetLatest(engine);
        try (SqlCompiler compiler = new SqlCompiler(engine)) {
            // FIX: was "dtop table 'abc';" — the typo made the cleanup statement fail to parse
            compiler.compile("drop table 'abc';", ctx);
        }
    }
    Files.deleteNative(directory);
}
Use of io.questdb.griffin.SqlExecutionContextImpl in project invesdwin-context-persistence by subes: class QuestDBPerformanceTest, method readGet.
/**
 * Benchmarks point lookups: for each known timestamp an equality query is run
 * against the "abc" table, asserting that exactly one row is found per key and
 * that the returned values arrive in strictly ascending order.
 *
 * @param engine the shared Cairo engine holding the populated "abc" table
 * @throws InterruptedException if the progress check is interrupted
 * @throws SqlException if a query fails to compile or execute
 */
private void readGet(final CairoEngine engine) throws InterruptedException, SqlException {
    final LoopInterruptedCheck progressCheck = new LoopInterruptedCheck(Duration.ONE_SECOND);
    final List<FDate> dates = Lists.toList(newValues());
    final Instant start = new Instant();
    final SqlExecutionContextImpl executionContext = new SqlExecutionContextImpl(engine, 1);
    try (SqlCompiler compiler = new SqlCompiler(engine)) {
        for (int reads = 0; reads < READS; reads++) {
            FDate previous = null;
            int matched = 0;
            for (final FDate date : dates) {
                final String query = "abc WHERE key = cast(" + date.millisValue() + " AS TIMESTAMP) LIMIT 1";
                try (RecordCursorFactory factory = compiler.compile(query, executionContext).getRecordCursorFactory()) {
                    try (RecordCursor cursor = factory.getCursor(executionContext)) {
                        final io.questdb.cairo.sql.Record record = cursor.getRecord();
                        // every key was inserted, so the cursor must yield a row
                        Assertions.checkTrue(cursor.hasNext());
                        final FDate value = new FDate(record.getLong(0));
                        if (previous != null) {
                            // values were written in ascending order; lookups must reflect that
                            Assertions.checkTrue(previous.isBefore(value));
                        }
                        previous = value;
                        matched++;
                    }
                }
            }
            Assertions.checkEquals(matched, VALUES);
            if (progressCheck.check()) {
                printProgress("Gets", start, VALUES * reads, VALUES * READS);
            }
        }
    }
    printProgress("GetsFinished", start, VALUES * READS, VALUES * READS);
}
Use of io.questdb.griffin.SqlExecutionContextImpl in project invesdwin-context-persistence by subes: class QuestDBPerformanceTest, method readGetLatest.
/**
 * Benchmarks "latest value at or before timestamp" lookups via a max()
 * aggregate, asserting one result per key and strictly ascending values.
 *
 * @param engine the shared Cairo engine holding the populated "abc" table
 * @throws InterruptedException if the progress check is interrupted
 * @throws SqlException if a query fails to compile or execute
 */
private void readGetLatest(final CairoEngine engine) throws InterruptedException, SqlException {
    final LoopInterruptedCheck progressCheck = new LoopInterruptedCheck(Duration.ONE_SECOND);
    final List<FDate> dates = Lists.toList(newValues());
    final Instant start = new Instant();
    final SqlExecutionContextImpl executionContext = new SqlExecutionContextImpl(engine, 1);
    try (SqlCompiler compiler = new SqlCompiler(engine)) {
        for (int reads = 0; reads < READS; reads++) {
            FDate previous = null;
            int matched = 0;
            for (final FDate date : dates) {
                // ORDER BY key DESC is horribly slow (90/s), SELECT max works better
                final String query = "SELECT max(value) FROM abc WHERE key <= cast(" + date.millisValue() + " AS TIMESTAMP) LIMIT 1";
                try (RecordCursorFactory factory = compiler.compile(query, executionContext).getRecordCursorFactory()) {
                    try (RecordCursor cursor = factory.getCursor(executionContext)) {
                        final io.questdb.cairo.sql.Record record = cursor.getRecord();
                        // the aggregate always produces a row for an existing key
                        Assertions.checkTrue(cursor.hasNext());
                        final FDate value = new FDate(record.getLong(0));
                        if (previous != null) {
                            // max over a growing prefix of ascending values must itself ascend
                            Assertions.checkTrue(previous.isBefore(value));
                        }
                        previous = value;
                        matched++;
                        if (progressCheck.check()) {
                            printProgress("GetLatests", start, VALUES * reads + matched, VALUES * READS);
                        }
                    }
                }
            }
            Assertions.checkEquals(matched, VALUES);
        }
    }
    printProgress("GetLatestsFinished", start, VALUES * READS, VALUES * READS);
}
Use of io.questdb.griffin.SqlExecutionContextImpl in project questdb by bluestreak01: class IODispatcherTest, method testJsonQueryGeoHashColumnChars.
// Verifies that geohash columns of various precisions (1c..12c and 1b) are
// serialized correctly in the JSON /query HTTP response.
@Test
public void testJsonQueryGeoHashColumnChars() throws Exception {
// single worker and a 16 KiB send buffer; row limit comes from the test fixture field
new HttpQueryTestBuilder().withWorkerCount(1).withHttpServerConfigBuilder(new HttpServerConfigurationBuilder().withSendBufferSize(16 * 1024).withConfiguredMaxQueryResponseRowLimit(configuredMaxQueryResponseRowLimit)).withTempFolder(temp).run(engine -> {
SqlExecutionContextImpl executionContext = new SqlExecutionContextImpl(engine, 1);
try (SqlCompiler compiler = new SqlCompiler(engine)) {
// seed table: rnd_str is deterministic under the test harness, so the
// expected JSON dataset below is stable across runs
compiler.compile("create table y as (\n" + "select\n" + "cast(rnd_str(null, 'questdb1234567890', 'u10m99dd3pbj') as geohash(1c)) geo1,\n" + "cast(rnd_str(null, 'questdb1234567890', 'u10m99dd3pbj') as geohash(3c)) geo2,\n" + "cast(rnd_str(null, 'questdb1234567890', 'u10m99dd3pbj') as geohash(6c)) geo4,\n" + "cast(rnd_str(null, 'questdb1234567890', 'u10m99dd3pbj') as geohash(12c)) geo8," + "cast(rnd_str(null, 'questdb1234567890', 'u10m99dd3pbj') as geohash(1b)) geo01\n" + "from long_sequence(3)\n" + ")", executionContext);
// URL-encoded query ('+' for spaces)
String request = "SELECT+*+FROM+y";
// expected response is a chunked HTTP body: "0166" is the chunk size in hex,
// "00" the terminating chunk; column types must echo the declared precisions
new SendAndReceiveRequestBuilder().executeWithStandardHeaders("GET /query?query=" + request + " HTTP/1.1\r\n", "0166\r\n" + "{\"query\":\"SELECT * FROM y\",\"columns\":[" + "{\"name\":\"geo1\",\"type\":\"GEOHASH(1c)\"}," + "{\"name\":\"geo2\",\"type\":\"GEOHASH(3c)\"}," + "{\"name\":\"geo4\",\"type\":\"GEOHASH(6c)\"}," + "{\"name\":\"geo8\",\"type\":\"GEOHASH(12c)\"}," + "{\"name\":\"geo01\",\"type\":\"GEOHASH(1b)\"}" + "],\"dataset\":[" + "[null,null,\"questd\",\"u10m99dd3pbj\",\"1\"]," + "[\"u\",\"u10\",\"questd\",null,\"1\"]," + "[\"q\",\"u10\",\"questd\",\"questdb12345\",\"1\"]" + "],\"count\":3}\r\n" + "00\r\n" + "\r\n");
}
});
}
Use of io.questdb.griffin.SqlExecutionContextImpl in project questdb by bluestreak01: class IODispatcherTest, method testImportEpochTimestamp.
// Verifies that CSV import over HTTP parses epoch-micro timestamps into the
// designated timestamp column and that the imported rows round-trip via SQL.
@Test
public void testImportEpochTimestamp() throws Exception {
// two workers, plain HTTP/1.1 with keep-alive; no deflate so the expected
// response bytes below can be matched literally
new HttpQueryTestBuilder().withTempFolder(temp).withWorkerCount(2).withHttpServerConfigBuilder(new HttpServerConfigurationBuilder().withNetwork(NetworkFacadeImpl.INSTANCE).withDumpingTraffic(false).withAllowDeflateBeforeSend(false).withHttpProtocolVersion("HTTP/1.1 ").withServerKeepAlive(true)).run((engine) -> {
SqlExecutionContextImpl executionContext = new SqlExecutionContextImpl(engine, 1);
try (SqlCompiler compiler = new SqlCompiler(engine)) {
compiler.compile("create table test (ts timestamp, value int) timestamp(ts) partition by DAY", executionContext);
// POST a multipart CSV upload (11 rows; one row deliberately ends with a
// bare \n to exercise mixed line endings) and assert the exact chunked
// plain-text import summary the server returns
sendAndReceive(NetworkFacadeImpl.INSTANCE, "POST /upload?name=test HTTP/1.1\r\n" + "Host: localhost:9000\r\n" + "User-Agent: curl/7.71.1\r\n" + "Accept: */*\r\n" + "Content-Length: 372\r\n" + "Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryOsOAD9cPKyHuxyBV\r\n" + "\r\n" + "------WebKitFormBoundaryOsOAD9cPKyHuxyBV\r\n" + "Content-Disposition: form-data; name=\"data\"\r\n" + "\r\n" + "100000000,1000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "100000001,2000\r\n" + "\r\n" + "------WebKitFormBoundaryOsOAD9cPKyHuxyBV--", "HTTP/1.1 200 OK\r\n" + "Server: questDB/1.0\r\n" + "Date: Thu, 1 Jan 1970 00:00:00 GMT\r\n" + "Transfer-Encoding: chunked\r\n" + "Content-Type: text/plain; charset=utf-8\r\n" + "\r\n" + "0507\r\n" + "+-----------------------------------------------------------------------------------------------------------------+\r\n" + "| Location: | test | Pattern | Locale | Errors |\r\n" + "| Partition by | DAY | | | |\r\n" + "| Timestamp | ts | | | |\r\n" + "+-----------------------------------------------------------------------------------------------------------------+\r\n" + "| Rows handled | 11 | | | |\r\n" + "| Rows imported | 11 | | | |\r\n" + "+-----------------------------------------------------------------------------------------------------------------+\r\n" + "| 0 | ts | TIMESTAMP | 0 |\r\n" + "| 1 | value | INT | 0 |\r\n" + "+-----------------------------------------------------------------------------------------------------------------+\r\n" + "\r\n" + "00\r\n" + "\r\n", 1, 0, false, true);
StringSink sink = new StringSink();
// 100000000 micros == 1970-01-01T00:01:40Z; verify the stored rows via SQL
TestUtils.assertSql(compiler, executionContext, "test", sink, "ts\tvalue\n" + "1970-01-01T00:01:40.000000Z\t1000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n" + "1970-01-01T00:01:40.000001Z\t2000\n");
}
});
}
Aggregations