Use of org.voltdb.client.ClientImpl in project voltdb by VoltDB: class JDBC4ClientConnection, method backpressureBarrier.
/**
 * Blocks the current thread until there is no more backpressure or there are no more
 * connections to the database.
 *
 * @throws InterruptedException
 * @throws IOException
 */
public void backpressureBarrier() throws InterruptedException, IOException {
    ClientImpl currentClient = this.getClient();
    if (currentClient == null) {
        throw new IOException("Client is unavailable for backpressureBarrier().");
    }
    currentClient.backpressureBarrier();
}
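For context, a minimal sketch of how this barrier is typically used against the plain org.voltdb.client.Client API to throttle asynchronous submissions; the procedure name "InsertItem" and the helper method insertAll are hypothetical placeholders, not part of the JDBC wrapper:

import org.voltdb.client.Client;
import org.voltdb.client.NullCallback;

// Hedged sketch: pause submission whenever the cluster reports backpressure.
// "InsertItem" is a hypothetical stored procedure.
void insertAll(Client client, int rowCount) throws Exception {
    for (int i = 0; i < rowCount; i++) {
        client.backpressureBarrier();                          // block while backpressure is active
        client.callProcedure(new NullCallback(), "InsertItem", i);
    }
    client.drain();                                            // wait for outstanding responses
}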
Use of org.voltdb.client.ClientImpl in project voltdb by VoltDB: class JDBC4ClientConnection, method drain.
/**
 * Block the current thread until all queued stored procedure invocations have received
 * responses or there are no more connections to the cluster.
 *
 * @throws InterruptedException
 * @throws IOException
 * @see Client#drain()
 */
public void drain() throws InterruptedException, IOException {
    ClientImpl currentClient = this.getClient();
    if (currentClient == null) {
        throw new IOException("Client is unavailable for drain().");
    }
    currentClient.drain();
}
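Similarly, a hedged sketch of where drain() usually sits during client shutdown; "UpdateStats" and the method name flushAndClose are hypothetical names used only for illustration:

import org.voltdb.client.Client;
import org.voltdb.client.NullCallback;

// Hedged sketch: ensure every queued invocation has received a response
// before the connection is closed.
void flushAndClose(Client client) throws Exception {
    client.callProcedure(new NullCallback(), "UpdateStats");  // fire-and-forget invocation
    client.drain();                                           // block until all responses arrive
    client.close();                                           // safe to close once drained
}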
Use of org.voltdb.client.ClientImpl in project voltdb by VoltDB: class TestAdhocCompilerException, method testEng7653UnexpectedException.
@Test
public void testEng7653UnexpectedException() throws Exception {
    // Enables special DDL string triggering artificial exception in AsyncCompilerAgent.
    System.setProperty("asynccompilerdebug", "true");
    String pathToCatalog = Configuration.getPathToCatalogForTest("adhocddl.jar");
    String pathToDeployment = Configuration.getPathToCatalogForTest("adhocddl.xml");
    VoltProjectBuilder builder = new VoltProjectBuilder();
    builder.setUseDDLSchema(true);
    boolean success = builder.compile(pathToCatalog, 1, 1, 0);
    assertTrue("Schema compilation failed", success);
    MiscUtils.copyFile(builder.getPathToDeployment(), pathToDeployment);
    VoltDB.Configuration config = new VoltDB.Configuration();
    config.m_pathToCatalog = pathToCatalog;
    config.m_pathToDeployment = pathToDeployment;
    // Trigger an exception inside AsyncCompilerAgent.
    try {
        startSystem(config);
        boolean threw = false;
        try {
            // Ten seconds should be long enough to detect a hang.
            String toxicDDL = AdHocNTBase.DEBUG_EXCEPTION_DDL + ";";
            ((ClientImpl) m_client).callProcedureWithClientTimeout(
                    BatchTimeoutOverrideType.NO_TIMEOUT, "@AdHoc", 10, TimeUnit.SECONDS, toxicDDL);
        } catch (ProcCallException pce) {
            String message = pce.getLocalizedMessage();
            if (message.startsWith("No response received in the allotted time")) {
                // Check that a network thread didn't die.
                tryOldClientWithValidDDL();
                tryNewClientWithValidDDL();
                fail("Timeout, server was probably hung. " + message);
            }
            assertTrue(String.format("Unexpected exception message: %s...", message),
                    message.contains(AdHocNTBase.DEBUG_EXCEPTION_DDL));
            threw = true;
        }
        assertTrue("Expected exception", threw);
    } finally {
        teardownSystem();
    }
}
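The cast to ClientImpl is what exposes the per-call client timeout used above. A minimal sketch of that pattern on its own, reusing the call signature shown in the test; the procedure name "SelectAll", the method name, and the import paths are assumptions for illustration:

import java.util.concurrent.TimeUnit;
import org.voltdb.client.BatchTimeoutOverrideType;
import org.voltdb.client.Client;
import org.voltdb.client.ClientImpl;
import org.voltdb.client.ClientResponse;

// Hedged sketch: issue a synchronous call that gives up after ten seconds.
// A hang surfaces as a ProcCallException whose message starts with
// "No response received in the allotted time".
ClientResponse callWithTenSecondTimeout(Client client) throws Exception {
    ClientImpl impl = (ClientImpl) client;            // the timeout overload lives on ClientImpl
    return impl.callProcedureWithClientTimeout(
            BatchTimeoutOverrideType.NO_TIMEOUT,      // keep the server-side batch timeout as-is
            "SelectAll",                              // hypothetical procedure name
            10, TimeUnit.SECONDS);                    // client-side timeout for this call only
}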
Use of org.voltdb.client.ClientImpl in project voltdb by VoltDB: class KafkaLoader10, method processKafkaMessages.
private void processKafkaMessages() throws Exception {
    // Split server list.
    final String[] serverlist = m_cliOptions.servers.split(",");
    // If we need to prompt the user for a VoltDB password, do so.
    m_cliOptions.password = CLIConfig.readPasswordIfNeeded(m_cliOptions.user, m_cliOptions.password, "Enter password: ");
    // Create connection.
    final ClientConfig clientConfig = new ClientConfig(m_cliOptions.user, m_cliOptions.password, null);
    if (m_cliOptions.ssl != null && !m_cliOptions.ssl.trim().isEmpty()) {
        clientConfig.setTrustStoreConfigFromPropertyFile(m_cliOptions.ssl);
        clientConfig.enableSSL();
    }
    clientConfig.setProcedureCallTimeout(0);
    m_client = getClient(clientConfig, serverlist);
    if (m_cliOptions.useSuppliedProcedure) {
        m_loader = new CSVTupleDataLoader((ClientImpl) m_client, m_cliOptions.procedure, new KafkaBulkLoaderCallback());
    } else {
        m_loader = new CSVBulkDataLoader((ClientImpl) m_client, m_cliOptions.table, m_cliOptions.batch, m_cliOptions.update, new KafkaBulkLoaderCallback());
    }
    m_loader.setFlushInterval(m_cliOptions.flush, m_cliOptions.flush);
    if ((m_executorService = getExecutor()) != null) {
        if (m_cliOptions.useSuppliedProcedure) {
            m_log.info("Kafka Consumer from topic: " + m_cliOptions.topic + " Started using procedure: " + m_cliOptions.procedure);
        } else {
            m_log.info("Kafka Consumer from topic: " + m_cliOptions.topic + " Started for table: " + m_cliOptions.table);
        }
        m_executorService.shutdown();
        m_executorService.awaitTermination(365, TimeUnit.DAYS);
        m_executorService = null;
    }
}
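The getClient() helper above is the loader's own; a hedged sketch of an equivalent setup with the standard client factory, which is what the ClientConfig calls here imply ("truststore.properties" and "volt-host" are hypothetical placeholders):

import org.voltdb.client.Client;
import org.voltdb.client.ClientConfig;
import org.voltdb.client.ClientFactory;

// Hedged sketch: build an SSL-enabled client with an unlimited procedure
// call timeout, mirroring the options used by the loader above.
Client connect(String user, String password) throws Exception {
    ClientConfig config = new ClientConfig(user, password, null);
    config.setTrustStoreConfigFromPropertyFile("truststore.properties"); // TLS trust store properties
    config.enableSSL();                                                  // encrypt client traffic
    config.setProcedureCallTimeout(0);                                   // 0 means never time out
    Client client = ClientFactory.createClient(config);
    client.createConnection("volt-host");                                // hypothetical host name
    return client;
}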
Use of org.voltdb.client.ClientImpl in project voltdb by VoltDB: class KafkaLoader, method processKafkaMessages.
public void processKafkaMessages() throws Exception {
    // Split server list.
    final String[] serverlist = m_config.servers.split(",");
    // If we need to prompt the user for a VoltDB password, do so.
    m_config.password = CLIConfig.readPasswordIfNeeded(m_config.user, m_config.password, "Enter password: ");
    // Create connection.
    final ClientConfig c_config = new ClientConfig(m_config.user, m_config.password, null);
    if (m_config.ssl != null && !m_config.ssl.trim().isEmpty()) {
        c_config.setTrustStoreConfigFromPropertyFile(m_config.ssl);
        c_config.enableSSL();
    }
    // Set the procedure call timeout to infinite.
    c_config.setProcedureCallTimeout(0);
    m_client = getClient(c_config, serverlist, m_config.port);
    if (m_config.useSuppliedProcedure) {
        m_loader = new CSVTupleDataLoader((ClientImpl) m_client, m_config.procedure, new KafkaBulkLoaderCallback());
    } else {
        m_loader = new CSVBulkDataLoader((ClientImpl) m_client, m_config.table, m_config.batch, m_config.update, new KafkaBulkLoaderCallback());
    }
    m_loader.setFlushInterval(m_config.flush, m_config.flush);
    m_consumer = new KafkaConsumerConnector(m_config);
    try {
        m_executorService = getConsumerExecutor(m_consumer, m_loader);
        if (m_config.useSuppliedProcedure) {
            m_log.info("Kafka Consumer from topic: " + m_config.topic + " Started using procedure: " + m_config.procedure);
        } else {
            m_log.info("Kafka Consumer from topic: " + m_config.topic + " Started for table: " + m_config.table);
        }
        m_executorService.awaitTermination(365, TimeUnit.DAYS);
    } catch (Throwable terminate) {
        m_log.error("Error in Kafka Consumer", terminate);
        System.exit(-1);
    }
    close();
}
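The final close() is not shown here; a hedged sketch of the kind of teardown it implies, limited to calls on the standard ExecutorService and Client APIs (the method name shutdown and the ten-second grace period are assumptions):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import org.voltdb.client.Client;

// Hedged sketch: stop the consumer threads, then let the client finish
// outstanding work before releasing its network resources.
void shutdown(ExecutorService consumers, Client client) throws Exception {
    consumers.shutdown();                              // stop accepting new Kafka batches
    consumers.awaitTermination(10, TimeUnit.SECONDS);  // give in-flight work time to finish
    client.drain();                                    // wait for queued invocations to complete
    client.close();                                    // close connections to the cluster
}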