Use of java.sql.Statement in project hive by apache.
The class TestBeeLineWithArgs, method createTable.
/**
 * Create the table used by the tests.
 * @throws ClassNotFoundException
 * @throws SQLException
 */
private static void createTable() throws ClassNotFoundException, SQLException {
  Class.forName(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER);
  Connection con = DriverManager.getConnection(miniHS2.getBaseJdbcURL(), userName, "");
  assertNotNull("Connection is null", con);
  assertFalse("Connection should not be closed", con.isClosed());
  Statement stmt = con.createStatement();
  assertNotNull("Statement is null", stmt);
  stmt.execute("set hive.support.concurrency = false");
  HiveConf conf = new HiveConf();
  String dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
  Path dataFilePath = new Path(dataFileDir, "kv1.txt");
  // drop any existing copy of the table; a failure here fails the test
  try {
    stmt.execute("drop table " + tableName);
  } catch (Exception ex) {
    fail(ex.toString() + " " + ExceptionUtils.getStackTrace(ex));
  }
  // create table
  stmt.execute("create table " + tableName
      + " (under_col int comment 'the under column', value string) comment '" + tableComment + "'");
  // load data
  stmt.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
}
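The same setup can be written with try-with-resources so the Connection and Statement are closed even when one of the statements throws. A minimal sketch, not the project's code (the method name is hypothetical), reusing miniHS2, userName, tableName, and tableComment from the test class:

private static void createTableWithResources() throws ClassNotFoundException, SQLException {
  Class.forName(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER);
  try (Connection con = DriverManager.getConnection(miniHS2.getBaseJdbcURL(), userName, "");
      Statement stmt = con.createStatement()) {
    stmt.execute("set hive.support.concurrency = false");
    // "if exists" avoids having to catch a failure when the table is absent
    stmt.execute("drop table if exists " + tableName);
    stmt.execute("create table " + tableName
        + " (under_col int comment 'the under column', value string) comment '" + tableComment + "'");
    HiveConf conf = new HiveConf();
    String dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
    stmt.execute("load data local inpath '" + new Path(dataFileDir, "kv1.txt") + "' into table " + tableName);
  }
}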
Use of java.sql.Statement in project hive by apache.
The class TestJdbcDriver2, method testBuiltInUDFCol.
/**
 * Verify selecting using built-in UDFs.
 * @throws SQLException
 */
@Test
public void testBuiltInUDFCol() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName + " where c1=1");
  ResultSetMetaData md = res.getMetaData();
  // two result columns: the original column and the UDF expression
  assertEquals(md.getColumnCount(), 2);
  // verify the system-generated column name for the UDF expression
  assertEquals(md.getColumnLabel(2), "_c1");
  assertTrue(res.next());
  assertEquals(res.getLong(1), 1);
  assertEquals(res.getString(2), "1");
  res.close();
  stmt.close();
}
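The system-generated labels for unaliased expressions can also be inspected generically through ResultSetMetaData. A short sketch, not part of the test, reusing con and dataTypeTableName from the test class; the printout is for illustration only:

try (Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName + " where c1=1")) {
  ResultSetMetaData md = res.getMetaData();
  for (int i = 1; i <= md.getColumnCount(); i++) {
    // the unaliased bin(c12) expression at position 2 is labeled _c1 by the compiler
    System.out.println(md.getColumnLabel(i) + " : " + md.getColumnTypeName(i));
  }
}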
Use of java.sql.Statement in project hive by apache.
The class TestJdbcDriver2, method testIntervalTypes.
@Test
public void testIntervalTypes() throws Exception {
  Statement stmt = con.createStatement();
  // Since interval types are not currently supported as table columns, create them
  // as expressions.
  ResultSet res = stmt.executeQuery(
      "select case when c17 is null then null else interval '1' year end as col1,"
      + " c17 - c17 as col2 from " + dataTypeTableName);
  ResultSetMetaData meta = res.getMetaData();
  assertEquals("col1", meta.getColumnLabel(1));
  assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
  assertEquals("interval_year_month", meta.getColumnTypeName(1));
  assertEquals(11, meta.getColumnDisplaySize(1));
  assertEquals(11, meta.getPrecision(1));
  assertEquals(0, meta.getScale(1));
  assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
  assertEquals("col2", meta.getColumnLabel(2));
  assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
  assertEquals("interval_day_time", meta.getColumnTypeName(2));
  assertEquals(29, meta.getColumnDisplaySize(2));
  assertEquals(29, meta.getPrecision(2));
  assertEquals(0, meta.getScale(2));
  assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
  // row 1 - results should be null
  assertTrue(res.next());
  // check all but the last column
  for (int i = 1; i < meta.getColumnCount(); i++) {
    assertNull("Column " + i + " should be null", res.getObject(i));
  }
  // row 2 - results should be null
  assertTrue(res.next());
  for (int i = 1; i < meta.getColumnCount(); i++) {
    assertNull("Column " + i + " should be null", res.getObject(i));
  }
  // row 3
  assertTrue(res.next());
  assertEquals("1-0", res.getString(1));
  assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
  assertEquals("0 00:00:00.000000000", res.getString(2));
  assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
  stmt.close();
}
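Since both interval types are reported as java.sql.Types.OTHER, a generic consumer has to branch on the column type name rather than the JDBC type code. A small sketch under that assumption, not part of the test, again reusing con and dataTypeTableName; the interval literals and the printout are for illustration only:

try (Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery(
        "select interval '1' year as y, interval '2' day as d from " + dataTypeTableName + " limit 1")) {
  ResultSetMetaData meta = res.getMetaData();
  while (res.next()) {
    for (int i = 1; i <= meta.getColumnCount(); i++) {
      if ("interval_year_month".equals(meta.getColumnTypeName(i))) {
        HiveIntervalYearMonth ym = (HiveIntervalYearMonth) res.getObject(i);
        System.out.println(meta.getColumnLabel(i) + " = " + ym.getYears() + " year(s)");
      } else if ("interval_day_time".equals(meta.getColumnTypeName(i))) {
        HiveIntervalDayTime dt = (HiveIntervalDayTime) res.getObject(i);
        System.out.println(meta.getColumnLabel(i) + " = " + dt.getDays() + " day(s)");
      }
    }
  }
}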
Use of java.sql.Statement in project hive by apache.
The class TestJdbcDriver2, method testOutOfBoundCols.
/**
 * Test bad arguments to the getXXX() methods.
 * @throws SQLException
 */
@Test
public void testOutOfBoundCols() throws SQLException {
  Statement stmt = con.createStatement();
  ResultSet res = stmt.executeQuery("select * from " + tableName);
  // row 1
  assertTrue(res.next());
  try {
    res.getInt(200);
  } catch (SQLException e) {
    // expected: column index out of range
  }
  try {
    res.getInt("zzzz");
  } catch (SQLException e) {
    // expected: unknown column name
  }
  stmt.close();
}
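As written, the test only swallows the expected SQLExceptions; a stricter variant can also assert that the driver actually rejects the bad arguments. A sketch of such a variant, not code from the project (the method name is hypothetical), reusing con and tableName from the test class:

@Test
public void testOutOfBoundColsStrict() throws SQLException {
  try (Statement stmt = con.createStatement();
      ResultSet res = stmt.executeQuery("select * from " + tableName)) {
    assertTrue(res.next());
    try {
      res.getInt(200);
      fail("getInt(200) should throw SQLException for an out-of-range column index");
    } catch (SQLException expected) {
      // expected
    }
    try {
      res.getInt("zzzz");
      fail("getInt(\"zzzz\") should throw SQLException for an unknown column name");
    } catch (SQLException expected) {
      // expected
    }
  }
}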
Use of java.sql.Statement in project hive by apache.
The class LlapBaseInputFormat, method getSplits.
@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
  List<InputSplit> ins = new ArrayList<InputSplit>();
  if (url == null)
    url = job.get(URL_KEY);
  if (query == null)
    query = job.get(QUERY_KEY);
  if (user == null)
    user = job.get(USER_KEY);
  if (pwd == null)
    pwd = job.get(PWD_KEY);
  if (url == null || query == null) {
    throw new IllegalStateException();
  }
  try {
    Class.forName(driverName);
  } catch (ClassNotFoundException e) {
    throw new IOException(e);
  }
  String escapedQuery = StringUtils.escapeString(query, ESCAPE_CHAR, escapedChars);
  String sql = String.format(SPLIT_QUERY, escapedQuery, numSplits);
  try (Connection con = DriverManager.getConnection(url, user, pwd);
      Statement stmt = con.createStatement();
      ResultSet res = stmt.executeQuery(sql)) {
    while (res.next()) {
      // deserialize split
      DataInput in = new DataInputStream(res.getBinaryStream(1));
      InputSplitWithLocationInfo is = new LlapInputSplit();
      is.readFields(in);
      ins.add(is);
    }
  } catch (Exception e) {
    throw new IOException(e);
  }
  return ins.toArray(new InputSplit[ins.size()]);
}
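Callers typically drive this method through a JobConf rather than the instance fields. The following is a minimal driver sketch, not code from the project; it assumes the URL_KEY, QUERY_KEY, USER_KEY, and PWD_KEY constants read above are accessible to the caller and that LlapBaseInputFormat has a usable no-argument constructor, and the helper name is hypothetical:

public static InputSplit[] fetchSplits(String jdbcUrl, String query) throws IOException {
  // hypothetical helper for illustration only
  JobConf job = new JobConf();
  job.set(LlapBaseInputFormat.URL_KEY, jdbcUrl);   // HiveServer2 JDBC URL
  job.set(LlapBaseInputFormat.USER_KEY, "hive");   // connection user
  job.set(LlapBaseInputFormat.PWD_KEY, "");        // password; empty for unsecured test setups
  job.set(LlapBaseInputFormat.QUERY_KEY, query);   // query whose splits are requested
  // ask for 4 splits; getSplits runs the split-generation query over JDBC and deserializes the results
  return new LlapBaseInputFormat().getSplits(job, 4);
}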