Use of org.apache.beam.sdk.io.snowflake.test.FakeSnowflakeBasicDataSource in project beam by apache.
From the class StreamingWriteTest, method setup.
@BeforeClass
public static void setup() {
  snowflakeService = new FakeSnowflakeStreamingServiceImpl();
  PipelineOptionsFactory.register(TestSnowflakePipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(TestSnowflakePipelineOptions.class);
  options.setUsername("username");
  options.setServerName("NULL.snowflakecomputing.com");
  testData = LongStream.range(0, 100).boxed().collect(Collectors.toList());
  FakeSnowflakeDatabase.createTable(FAKE_TABLE);
  dataSourceConfiguration =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName())
          .withSchema("PUBLIC")
          .withDatabase("DATABASE")
          .withWarehouse("WAREHOUSE");
}
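A minimal sketch of how a streaming test might use this configuration, assuming a TestPipeline field named pipeline and an illustrative SnowPipe name; only builder methods documented for SnowflakeIO streaming writes are used, and the hook the tests use to inject the fake snowflakeService is omitted.

// Hedged sketch, not the actual StreamingWriteTest body: stream testData via a SnowPipe.
pipeline
    .apply(Create.of(testData))
    .apply(
        SnowflakeIO.<Long>write()
            .withDataSourceConfiguration(dataSourceConfiguration)
            .withStagingBucketName(options.getStagingBucketName()) // assumes a bucket is also set on options
            .withStorageIntegrationName(options.getStorageIntegrationName()) // assumed, as in the batch tests
            .withSnowPipe("Snowpipe") // illustrative pipe name; streaming writes go through a SnowPipe instead of .to()
            .withUserDataMapper(
                (SnowflakeIO.UserDataMapper<Long>) record -> new Object[] {record.toString()}));
// The real tests also route the write through the fake snowflakeService created above via a
// test-only hook, omitted from this sketch.
pipeline.run(options).waitUntilFinish();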
Use of org.apache.beam.sdk.io.snowflake.test.FakeSnowflakeBasicDataSource in project beam by apache.
From the class CreateDispositionTest, method setupAll.
@BeforeClass
public static void setupAll() {
  PipelineOptionsFactory.register(TestSnowflakePipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(TestSnowflakePipelineOptions.class);
  options.setStagingBucketName(BUCKET_NAME);
  options.setServerName("NULL.snowflakecomputing.com");
  stagingBucketName = options.getStagingBucketName();
  storageIntegrationName = options.getStorageIntegrationName();
  snowflakeService = new FakeSnowflakeBatchServiceImpl();
  testData = LongStream.range(0, 100).boxed().collect(Collectors.toList());
  dc =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName());
}
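A hedged sketch of the kind of pipeline this setup supports: a write with CreateDisposition.CREATE_IF_NEEDED against a table that does not yet exist. The table name, one-column schema, and data mapper are assumptions; CreateDisposition, SnowflakeTableSchema, SnowflakeColumn, and SnowflakeNumber are SnowflakeIO's documented disposition and schema types.

// Hedged sketch, not the actual test body: let SnowflakeIO create the target table on demand.
pipeline
    .apply(Create.of(testData))
    .apply(
        SnowflakeIO.<Long>write()
            .withDataSourceConfiguration(dc)
            .to("NONEXISTENT_TABLE") // illustrative table name
            .withStagingBucketName(stagingBucketName)
            .withStorageIntegrationName(storageIntegrationName)
            .withUserDataMapper(
                (SnowflakeIO.UserDataMapper<Long>) record -> new Object[] {record.toString()})
            .withTableSchema(new SnowflakeTableSchema(SnowflakeColumn.of("ID", new SnowflakeNumber())))
            .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED));
// The fake snowflakeService created above is wired in by the real tests through a test-only
// hook, omitted from this sketch.
pipeline.run(options).waitUntilFinish();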
Use of org.apache.beam.sdk.io.snowflake.test.FakeSnowflakeBasicDataSource in project beam by apache.
From the class SchemaDispositionTest, method setupAll.
@BeforeClass
public static void setupAll() {
  PipelineOptionsFactory.register(TestSnowflakePipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(TestSnowflakePipelineOptions.class);
  options.setStagingBucketName(BUCKET_NAME);
  options.setServerName("NULL.snowflakecomputing.com");
  stagingBucketName = options.getStagingBucketName();
  storageIntegrationName = options.getStorageIntegrationName();
  snowflakeService = new FakeSnowflakeBatchServiceImpl();
  dc =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName());
}
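A hedged sketch of the table schema such a test would pass to withTableSchema when SnowflakeIO is allowed to create the table; the column names are illustrative, and the type classes are assumed to come from the org.apache.beam.sdk.io.snowflake.data packages.

// Illustrative schema only; names and types are assumptions.
SnowflakeTableSchema tableSchema =
    new SnowflakeTableSchema(
        SnowflakeColumn.of("ID", new SnowflakeNumber()),
        SnowflakeColumn.of("NAME", new SnowflakeText(), true), // nullable
        SnowflakeColumn.of("CREATED", new SnowflakeDate(), true)); // nullable

The write itself would then mirror the CreateDispositionTest sketch above, passing .withTableSchema(tableSchema) together with .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED).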
Use of org.apache.beam.sdk.io.snowflake.test.FakeSnowflakeBasicDataSource in project beam by apache.
From the class SnowflakeIOReadTest, method setup.
@BeforeClass
public static void setup() {
  List<String> testData = Arrays.asList("Paul,51,red", "Jackson,41,green");
  avroTestData =
      ImmutableList.of(
          new AvroGeneratedUser("Paul", 51, "red"),
          new AvroGeneratedUser("Jackson", 41, "green"));
  FakeSnowflakeDatabase.createTableWithElements(FAKE_TABLE, testData);
  options.setServerName("NULL.snowflakecomputing.com");
  options.setStorageIntegrationName("STORAGE_INTEGRATION");
  options.setStagingBucketName(BUCKET_NAME);
  dataSourceConfiguration =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName());
  snowflakeService = new FakeSnowflakeBatchServiceImpl();
}
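A hedged sketch of a read built on this setup, assuming a TestPipeline field named pipeline; the CSV mapper maps each fake "name,age,color" row onto the AvroGeneratedUser constructor, and the hook the tests use to inject the fake snowflakeService is omitted.

// Hedged sketch, not the actual test body: read the two fake rows back and compare with avroTestData.
PCollection<AvroGeneratedUser> users =
    pipeline.apply(
        SnowflakeIO.<AvroGeneratedUser>read()
            .withDataSourceConfiguration(dataSourceConfiguration)
            .fromTable(FAKE_TABLE)
            .withStagingBucketName(options.getStagingBucketName())
            .withStorageIntegrationName(options.getStorageIntegrationName())
            .withCsvMapper(
                (SnowflakeIO.CsvMapper<AvroGeneratedUser>)
                    parts -> new AvroGeneratedUser(parts[0], Integer.valueOf(parts[1]), parts[2]))
            .withCoder(AvroCoder.of(AvroGeneratedUser.class)));
PAssert.that(users).containsInAnyOrder(avroTestData);
pipeline.run(options).waitUntilFinish();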
Use of org.apache.beam.sdk.io.snowflake.test.FakeSnowflakeBasicDataSource in project beam by apache.
From the class SnowflakeIOWriteTest, method setup.
@Before
public void setup() {
  FakeSnowflakeDatabase.createTable(FAKE_TABLE);
  PipelineOptionsFactory.register(TestSnowflakePipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(TestSnowflakePipelineOptions.class);
  options.setStagingBucketName(BUCKET_NAME);
  options.setStorageIntegrationName("STORAGE_INTEGRATION");
  options.setServerName("NULL.snowflakecomputing.com");
  dc =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName());
}
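A hedged sketch of a batch write built on this setup, assuming the TestPipeline rule is named pipeline; the element values are illustrative and the data mapper follows the documented UserDataMapper pattern.

// Hedged sketch, not the actual test body: write a handful of longs to the fake table in batch mode.
pipeline
    .apply(Create.of(0L, 1L, 2L, 3L)) // illustrative elements
    .apply(
        SnowflakeIO.<Long>write()
            .withDataSourceConfiguration(dc)
            .to(FAKE_TABLE)
            .withStagingBucketName(options.getStagingBucketName())
            .withStorageIntegrationName(options.getStorageIntegrationName())
            .withUserDataMapper(
                (SnowflakeIO.UserDataMapper<Long>) record -> new Object[] {record.toString()}));
pipeline.run(options).waitUntilFinish();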