Use of org.apache.spark.sql.SQLConf in project kylo by Teradata: class AbstractHiveDataSetProviderTest, method beforeEach.
/**
 * Prepares a fresh mocked {@code SQLContext} before every test.
 *
 * <p>The mock is wired to hand back a real {@link SQLConf} from {@code conf()},
 * since provider code under test reads configuration through that call.
 */
@Before
public void beforeEach() {
    final SQLContext mockContext = Mockito.mock(SQLContext.class);
    Mockito.when(mockContext.conf()).thenReturn(new SQLConf());
    sqlContext = mockContext;
}
Use of org.apache.spark.sql.SQLConf in project kylo by Teradata: class AbstractJdbcDataSetProviderTest, method write.
/**
 * Verify writing to a JDBC table.
 *
 * <p>Mocks a two-column {@code DataFrame} (INT col1, STRING col2) backed by a mocked
 * {@code SQLContext}, writes it through {@code MockJdbcDataSetProvider} into an
 * in-memory H2 database, then checks the created table's column names and types via
 * H2's {@code SHOW COLUMNS}.
 *
 * @throws Exception if the H2 connection or query fails
 */
@Test
public void write() throws Exception {
    // Mock data set: schema only — the provider under test reads schema and sqlContext
    final DataFrame dataFrame = Mockito.mock(DataFrame.class);
    final SQLContext sqlContext = Mockito.mock(SQLContext.class);
    Mockito.when(sqlContext.conf()).thenReturn(new SQLConf());
    Mockito.when(dataFrame.sqlContext()).thenReturn(sqlContext);
    final StructField field1 = DataTypes.createStructField("col1", DataTypes.IntegerType, true);
    final StructField field2 = DataTypes.createStructField("col2", DataTypes.StringType, true);
    Mockito.when(dataFrame.schema()).thenReturn(DataTypes.createStructType(Arrays.asList(field1, field2)));

    // Mock options: target table and in-memory H2 JDBC URL
    final DataSetOptions options = new DataSetOptions();
    options.setOption("dbtable", "mytable");
    options.setOption("url", "jdbc:h2:mem:spark");

    // Test writing: keep a live connection open so the in-memory DB survives the write
    try (final Connection conn = DriverManager.getConnection("jdbc:h2:mem:spark")) {
        final MockJdbcDataSetProvider provider = new MockJdbcDataSetProvider();
        provider.write(Mockito.mock(KyloCatalogClient.class), options, dataFrame);

        // H2 reports identifiers upper-cased; StringType maps to CLOB
        try (final Statement stmt = conn.createStatement();
             final ResultSet rs = stmt.executeQuery("SHOW COLUMNS FROM mytable")) {
            Assert.assertTrue("Expected 2 rows; found 0 rows", rs.next());
            Assert.assertEquals("COL1", rs.getString(1));
            Assert.assertEquals("INTEGER(10)", rs.getString(2));
            Assert.assertTrue("Expected 2 rows; found 1 row", rs.next());
            Assert.assertEquals("COL2", rs.getString(1));
            Assert.assertEquals("CLOB(2147483647)", rs.getString(2));
            // BUG FIX: message previously read "fonud 3 rows" — typo corrected
            Assert.assertFalse("Expected 2 rows; found 3 rows", rs.next());
        }
    }
}
Aggregations