
Example 96 with Statement

use of java.sql.Statement in project hive by apache.

the class TestBeeLineWithArgs method createTable.

/**
   * Create table for use by tests
   * @throws ClassNotFoundException
   * @throws SQLException
   */
private static void createTable() throws ClassNotFoundException, SQLException {
    Class.forName(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER);
    Connection con = DriverManager.getConnection(miniHS2.getBaseJdbcURL(), userName, "");
    assertNotNull("Connection is null", con);
    assertFalse("Connection should not be closed", con.isClosed());
    Statement stmt = con.createStatement();
    assertNotNull("Statement is null", stmt);
    stmt.execute("set hive.support.concurrency = false");
    HiveConf conf = new HiveConf();
    String dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
    Path dataFilePath = new Path(dataFileDir, "kv1.txt");
    // drop the table if it already exists; fail the test if the drop statement itself errors out
    try {
        stmt.execute("drop table " + tableName);
    } catch (Exception ex) {
        fail(ex.toString() + " " + ExceptionUtils.getStackTrace(ex));
    }
    // create table
    stmt.execute("create table " + tableName + " (under_col int comment 'the under column', value string) comment '" + tableComment + "'");
    // load data
    stmt.execute("load data local inpath '" + dataFilePath.toString() + "' into table " + tableName);
}
Also used : Path(org.apache.hadoop.fs.Path) Statement(java.sql.Statement) Connection(java.sql.Connection) HiveConf(org.apache.hadoop.hive.conf.HiveConf) SQLException(java.sql.SQLException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
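
A test that calls createTable() would typically follow up with a quick query over the same connection to confirm the load worked. The helper below is a hypothetical sketch, not part of the Hive source; it assumes the tableName field and JUnit's assertTrue are in scope and takes the Connection as a parameter, since createTable() keeps its connection local.

private static void verifyTableLoaded(Connection con) throws SQLException {
    // Illustrative check only: confirm the load statement above actually put rows into the table.
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select count(*) from " + tableName);
    assertTrue("count(*) should return a row", res.next());
    assertTrue("Table should contain the loaded rows", res.getLong(1) > 0);
    res.close();
    stmt.close();
}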

Example 97 with Statement

use of java.sql.Statement in project hive by apache.

the class TestJdbcDriver2 method testBuiltInUDFCol.

/**
   * Verify selecting using builtin UDFs
   * @throws SQLException
   */
@Test
public void testBuiltInUDFCol() throws SQLException {
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select c12, bin(c12) from " + dataTypeTableName + " where c1=1");
    ResultSetMetaData md = res.getMetaData();
    // two result columns: c12 and the bin(c12) expression
    assertEquals(md.getColumnCount(), 2);
    // verify the system generated column name
    assertEquals(md.getColumnLabel(2), "_c1");
    assertTrue(res.next());
    assertEquals(res.getLong(1), 1);
    assertEquals(res.getString(2), "1");
    res.close();
    stmt.close();
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) Test(org.junit.Test)
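
The "_c1" label the test checks is the name Hive generates for an unaliased expression. As a hedged variant that is not part of the Hive source, aliasing the UDF call gives a stable label instead of depending on the generated one (the same con and dataTypeTableName fields are assumed):

Statement stmt = con.createStatement();
ResultSet res = stmt.executeQuery("select c12, bin(c12) as c12_bin from " + dataTypeTableName + " where c1=1");
// With an explicit alias, the driver reports the chosen name rather than a generated "_c1".
assertEquals("c12_bin", res.getMetaData().getColumnLabel(2));
res.close();
stmt.close();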

Example 98 with Statement

use of java.sql.Statement in project hive by apache.

the class TestJdbcDriver2 method testIntervalTypes.

@Test
public void testIntervalTypes() throws Exception {
    Statement stmt = con.createStatement();
    // Since interval types not currently supported as table columns, need to create them
    // as expressions.
    ResultSet res = stmt.executeQuery("select case when c17 is null then null else interval '1' year end as col1," + " c17 -  c17 as col2 from " + dataTypeTableName);
    ResultSetMetaData meta = res.getMetaData();
    assertEquals("col1", meta.getColumnLabel(1));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
    assertEquals("interval_year_month", meta.getColumnTypeName(1));
    assertEquals(11, meta.getColumnDisplaySize(1));
    assertEquals(11, meta.getPrecision(1));
    assertEquals(0, meta.getScale(1));
    assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
    assertEquals("col2", meta.getColumnLabel(2));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
    assertEquals("interval_day_time", meta.getColumnTypeName(2));
    assertEquals(29, meta.getColumnDisplaySize(2));
    assertEquals(29, meta.getPrecision(2));
    assertEquals(0, meta.getScale(2));
    assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
    // row 1 - results should be null
    assertTrue(res.next());
    // skip the last (partitioning) column since it is always non-null
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 2 - results should be null
    assertTrue(res.next());
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 3
    assertTrue(res.next());
    assertEquals("1-0", res.getString(1));
    assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
    assertEquals("0 00:00:00.000000000", res.getString(2));
    assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
    stmt.close();
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) Test(org.junit.Test)
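
Because interval values only exist as expressions, they can also be produced from literals without reading a table. The short sketch below is not from the Hive source; it assumes the same con field, that this HiveServer2 version accepts a SELECT without a FROM clause, and that the driver maps year-month intervals to HiveIntervalYearMonth as the metadata checks above indicate:

Statement stmt = con.createStatement();
ResultSet res = stmt.executeQuery("select interval '2-1' year to month");
assertTrue(res.next());
// getObject() returns the Hive interval type, mirroring getColumnClassName() above.
HiveIntervalYearMonth iym = (HiveIntervalYearMonth) res.getObject(1);
assertEquals(2, iym.getYears());
assertEquals(1, iym.getMonths());
res.close();
stmt.close();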

Example 99 with Statement

use of java.sql.Statement in project hive by apache.

the class TestJdbcDriver2 method testOutOfBoundCols.

/**
   * Test bad args to getXXX()
   * @throws SQLException
   */
@Test
public void testOutOfBoundCols() throws SQLException {
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select * from " + tableName);
    // row 1
    assertTrue(res.next());
    try {
        // column index 200 is out of range, so getInt(200) is expected to throw SQLException
        res.getInt(200);
    } catch (SQLException e) {
    }
    try {
        // there is no column named "zzzz", so getInt("zzzz") is expected to throw SQLException
        res.getInt("zzzz");
    } catch (SQLException e) {
    }
    stmt.close();
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) Test(org.junit.Test)
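
Note that the empty catch blocks above swallow the SQLException without asserting that it was actually raised. A hedged rewrite, not from the Hive source, makes the expectation explicit (the same con and tableName fields are assumed):

Statement stmt = con.createStatement();
ResultSet res = stmt.executeQuery("select * from " + tableName);
assertTrue(res.next());
try {
    res.getInt(200);
    fail("getInt(200) should throw SQLException for an out-of-range column index");
} catch (SQLException expected) {
    // expected: index 200 does not exist
}
try {
    res.getInt("zzzz");
    fail("getInt(\"zzzz\") should throw SQLException for an unknown column name");
} catch (SQLException expected) {
    // expected: no column named "zzzz"
}
stmt.close();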

Example 100 with Statement

use of java.sql.Statement in project hive by apache.

the class LlapBaseInputFormat method getSplits.

@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    List<InputSplit> ins = new ArrayList<InputSplit>();
    if (url == null)
        url = job.get(URL_KEY);
    if (query == null)
        query = job.get(QUERY_KEY);
    if (user == null)
        user = job.get(USER_KEY);
    if (pwd == null)
        pwd = job.get(PWD_KEY);
    if (url == null || query == null) {
        throw new IllegalStateException("Both the connection URL and the query must be provided");
    }
    try {
        Class.forName(driverName);
    } catch (ClassNotFoundException e) {
        throw new IOException(e);
    }
    String escapedQuery = StringUtils.escapeString(query, ESCAPE_CHAR, escapedChars);
    String sql = String.format(SPLIT_QUERY, escapedQuery, numSplits);
    try (Connection con = DriverManager.getConnection(url, user, pwd);
        Statement stmt = con.createStatement();
        ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
            // deserialize split
            DataInput in = new DataInputStream(res.getBinaryStream(1));
            InputSplitWithLocationInfo is = new LlapInputSplit();
            is.readFields(in);
            ins.add(is);
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
    return ins.toArray(new InputSplit[ins.size()]);
}
Also used : InputSplitWithLocationInfo(org.apache.hadoop.mapred.InputSplitWithLocationInfo) Statement(java.sql.Statement) ArrayList(java.util.ArrayList) Connection(java.sql.Connection) IOException(java.io.IOException) ByteString(com.google.protobuf.ByteString) DataInputStream(java.io.DataInputStream) IOException(java.io.IOException) DataInput(java.io.DataInput) ResultSet(java.sql.ResultSet) InputSplit(org.apache.hadoop.mapred.InputSplit)
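
For orientation, getSplits() reads its connection URL, query, user, and password from the JobConf when they were not supplied up front. The sketch below is not from the Hive source: it assumes the key constants referenced above (URL_KEY, QUERY_KEY, USER_KEY, PWD_KEY) are accessible to the caller, that a no-argument constructor exists, and it uses placeholder connection details and a placeholder query.

JobConf job = new JobConf();
job.set(LlapBaseInputFormat.URL_KEY, "jdbc:hive2://localhost:10000/default"); // placeholder HS2 URL
job.set(LlapBaseInputFormat.QUERY_KEY, "select under_col, value from some_table"); // placeholder query
job.set(LlapBaseInputFormat.USER_KEY, "hive");
job.set(LlapBaseInputFormat.PWD_KEY, "");
// Ask the input format to generate four LLAP splits for the query.
InputSplit[] splits = new LlapBaseInputFormat().getSplits(job, 4);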

Aggregations

Statement (java.sql.Statement): 2195
Connection (java.sql.Connection): 1082
ResultSet (java.sql.ResultSet): 1081
PreparedStatement (java.sql.PreparedStatement): 957
SQLException (java.sql.SQLException): 911
Test (org.junit.Test): 547
ArrayList (java.util.ArrayList): 152
CallableStatement (java.sql.CallableStatement): 128
ResultSetMetaData (java.sql.ResultSetMetaData): 122
Properties (java.util.Properties): 110
IOException (java.io.IOException): 85
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 81
DruidPooledStatement (com.alibaba.druid.pool.DruidPooledStatement): 71
DataSource (javax.sql.DataSource): 62
HashMap (java.util.HashMap): 61
SQLFeatureNotSupportedException (java.sql.SQLFeatureNotSupportedException): 56
DruidPooledConnection (com.alibaba.druid.pool.DruidPooledConnection): 47
Context (javax.naming.Context): 42
MockConnection (com.alibaba.druid.mock.MockConnection): 41
DatabaseMetaData (java.sql.DatabaseMetaData): 40