Use of org.apache.cayenne.dbsync.reverse.filters.FiltersConfig in project cayenne by apache.

The class DbImporterOldMojoConfigurationTest, method testLoadSchema2.

public void testLoadSchema2() throws Exception {
    FiltersConfig filters = getCdbImport("pom-schema-2.xml")
            .createConfig(mock(Logger.class))
            .getDbLoaderConfig()
            .getFiltersConfig();

    TreeSet<IncludeTableFilter> includes = new TreeSet<>();
    includes.add(new IncludeTableFilter(null, new PatternFilter().exclude("^ETL_.*")));

    TreeSet<Pattern> excludes = new TreeSet<>(PatternFilter.PATTERN_COMPARATOR);
    excludes.add(PatternFilter.pattern("^ETL_.*"));

    assertEquals(filters.tableFilter(null, "NHL_STATS"), new TableFilter(includes, excludes));
}
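For comparison, the same kind of per-schema lookup can be reproduced without the Maven plugin by assembling the FiltersConfig directly. A minimal sketch, using only the calls shown in the test above; the class name is illustrative and the NHL_STATS schema name is taken from the test:

// Minimal sketch: build a FiltersConfig by hand and look up the TableFilter
// for one schema, analogous to what the pom-schema-2.xml configuration yields.
import org.apache.cayenne.dbsync.reverse.filters.FiltersConfig;
import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
import org.apache.cayenne.dbsync.reverse.filters.TableFilter;

public class FiltersConfigSketch {

    public static void main(String[] args) {
        // Include every table in schema NHL_STATS and import no stored procedures.
        FiltersConfig filters = FiltersConfig.create(
                null,                        // catalog: not used here
                "NHL_STATS",                 // schema
                TableFilter.everything(),    // tables: no restriction
                PatternFilter.INCLUDE_NOTHING);

        // The per-catalog/schema TableFilter is what the reverse engineering
        // machinery consults for each table it encounters.
        TableFilter tableFilter = filters.tableFilter(null, "NHL_STATS");
        System.out.println(tableFilter);
    }
}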
Use of org.apache.cayenne.dbsync.reverse.filters.FiltersConfig in project cayenne by apache.

The class MergerOptions, method prepareMigrator.

/**
 * check database and create the {@link List} of {@link MergerToken}s
 */
protected void prepareMigrator() {
    try {
        adapter = connectionInfo.makeAdapter(getApplication().getClassLoadingService());
        MergerTokenFactory mergerTokenFactory = mergerTokenFactoryProvider.get(adapter);
        tokens.setMergerTokenFactory(mergerTokenFactory);

        FiltersConfig filters = FiltersConfig.create(
                defaultCatalog, defaultSchema, TableFilter.everything(), PatternFilter.INCLUDE_NOTHING);

        DataMapMerger merger = DataMapMerger.builder(mergerTokenFactory)
                .filters(filters)
                .build();

        DbLoaderConfiguration config = new DbLoaderConfiguration();
        config.setFiltersConfig(filters);

        DataSource dataSource = connectionInfo.makeDataSource(getApplication().getClassLoadingService());

        DataMap dbImport;
        try (Connection conn = dataSource.getConnection()) {
            dbImport = new DbLoader(
                    adapter, conn, config,
                    new LoggingDbLoaderDelegate(LoggerFactory.getLogger(DbLoader.class)),
                    new DefaultObjectNameGenerator(NoStemStemmer.getInstance())).load();
        } catch (SQLException e) {
            throw new CayenneRuntimeException("Can't doLoad dataMap from db.", e);
        }

        tokens.setTokens(merger.createMergeTokens(dataMap, dbImport));
    } catch (Exception ex) {
        reportError("Error loading adapter", ex);
    }
}
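Stripped of the Modeler plumbing (adapter creation, token UI, error reporting), the method above is a three-step pipeline: build the filters, reverse engineer the database into a throwaway DataMap, then diff it against the project DataMap. A condensed sketch of just that pipeline, assuming the same imports and classes as the snippet above and that the adapter, token factory, data source, and existing DataMap are supplied by the caller:

// Condensed sketch of the load-then-merge pipeline from prepareMigrator().
// The adapter, mergerTokenFactory, dataSource and existing DataMap are assumed
// to come from the surrounding application.
static List<MergerToken> loadAndDiff(DbAdapter adapter,
                                     MergerTokenFactory mergerTokenFactory,
                                     DataSource dataSource,
                                     DataMap existing) throws SQLException {

    // 1. Same filters as above: everything under the default catalog/schema, no procedures.
    FiltersConfig filters = FiltersConfig.create(
            null, null, TableFilter.everything(), PatternFilter.INCLUDE_NOTHING);

    DbLoaderConfiguration config = new DbLoaderConfiguration();
    config.setFiltersConfig(filters);

    // 2. Reverse engineer the live database into a temporary DataMap.
    DataMap dbImport;
    try (Connection conn = dataSource.getConnection()) {
        dbImport = new DbLoader(
                adapter, conn, config,
                new LoggingDbLoaderDelegate(LoggerFactory.getLogger(DbLoader.class)),
                new DefaultObjectNameGenerator(NoStemStemmer.getInstance())).load();
    }

    // 3. Diff the project DataMap against the freshly loaded one.
    return DataMapMerger.builder(mergerTokenFactory)
            .filters(filters)
            .build()
            .createMergeTokens(existing, dbImport);
}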
Use of org.apache.cayenne.dbsync.reverse.filters.FiltersConfig in project cayenne by apache.

The class DataMapMergerTest, method testProcedures.

@Test
public void testProcedures() {
    DataMap dataMap1 = dataMap()
            .with(procedure("proc1").callParameters(new ProcedureParameter("test")))
            .build();
    DataMap dataMap2 = dataMap().build();

    PatternFilter patternFilter = new PatternFilter();
    patternFilter.include("proc1");
    FiltersConfig filtersConfig = FiltersConfig.create(null, null, null, patternFilter);

    DataMapMerger merger = DataMapMerger.builder(factory())
            .filters(filtersConfig)
            .build();

    assertEquals(1, merger.createMergeTokens(dataMap1, dataMap2).size());
}
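In FiltersConfig.create() the third argument is the table filter and the fourth is the procedure filter, so the config above only lets the procedure named proc1 through to the merger. A sketch of the same shape with a regex include and an explicit table filter instead of null, closer to what a production configuration would look like; the pattern and the mergerTokenFactory variable are illustrative, not taken from the test:

// Sketch: merge all tables, but only procedures whose names start with "proc".
// The regex and the mergerTokenFactory variable are illustrative assumptions.
PatternFilter procedureFilter = new PatternFilter();
procedureFilter.include("^proc.*");

FiltersConfig filtersConfig = FiltersConfig.create(
        null,                        // catalog
        null,                        // schema
        TableFilter.everything(),    // tables: no restriction
        procedureFilter);            // procedures: name must match ^proc.*

DataMapMerger merger = DataMapMerger.builder(mergerTokenFactory)
        .filters(filtersConfig)
        .build();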
Use of org.apache.cayenne.dbsync.reverse.filters.FiltersConfig in project cayenne by apache.

The class DbImporterMojoConfigurationTest, method testLoadSchema2.

@Test
public void testLoadSchema2() throws Exception {
    DbImporterMojo dbImporterMojo = getCdbImport("pom-schema-2.xml");
    DbImportConfiguration dbImportConfiguration = dbImporterMojo.createConfig(mock(Logger.class));
    dbImportConfiguration.setFiltersConfig(
            new FiltersConfigBuilder(dbImporterMojo.getReverseEngineering()).build());

    FiltersConfig filters = dbImportConfiguration.getDbLoaderConfig().getFiltersConfig();

    TreeSet<IncludeTableFilter> includes = new TreeSet<>();
    includes.add(new IncludeTableFilter(null, new PatternFilter().exclude("^ETL_.*")));

    TreeSet<Pattern> excludes = new TreeSet<>(PatternFilter.PATTERN_COMPARATOR);
    excludes.add(PatternFilter.pattern("^ETL_.*"));

    assertEquals(filters.tableFilter(null, "NHL_STATS"), new TableFilter(includes, excludes));
}
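Unlike the older test above, this variant goes through FiltersConfigBuilder, which converts the reverse engineering model attached to the mojo into a FiltersConfig. A rough sketch of that path built by hand rather than loaded from pom-schema-2.xml; it assumes the dbimport model classes (ReverseEngineering, Schema, ExcludeTable) and their add/set methods, so treat it as an approximation of what the XML is deserialized into rather than the plugin's exact code:

// Rough sketch, by hand instead of from pom-schema-2.xml. Assumes the dbimport
// model API (ReverseEngineering, Schema, ExcludeTable and their add/set methods).
ReverseEngineering reverseEngineering = new ReverseEngineering();

Schema schema = new Schema();
schema.setName("NHL_STATS");
schema.addExcludeTable(new ExcludeTable("^ETL_.*"));
reverseEngineering.addSchema(schema);

FiltersConfig filters = new FiltersConfigBuilder(reverseEngineering).build();

// As in the assertion above: tables in NHL_STATS are included,
// except those matching ^ETL_.*
TableFilter tableFilter = filters.tableFilter(null, "NHL_STATS");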
Use of org.apache.cayenne.dbsync.reverse.filters.FiltersConfig in project cayenne by apache.

The class MergeCase, method createMergeTokens.

protected List<MergerToken> createMergeTokens(String tableFilterInclude) {
    FiltersConfig filters = FiltersConfig.create(
            null, null, TableFilter.include(tableFilterInclude), PatternFilter.INCLUDE_NOTHING);

    DbLoaderConfiguration loaderConfiguration = new DbLoaderConfiguration();
    loaderConfiguration.setFiltersConfig(filters);

    DataMap dbImport;
    try (Connection conn = node.getDataSource().getConnection()) {
        dbImport = new DbLoader(
                node.getAdapter(), conn, loaderConfiguration,
                new LoggingDbLoaderDelegate(LoggerFactory.getLogger(DbLoader.class)),
                new DefaultObjectNameGenerator(NoStemStemmer.getInstance())).load();
    } catch (SQLException e) {
        throw new CayenneRuntimeException("Can't doLoad dataMap from db.", e);
    }

    List<MergerToken> tokens = merger().filters(filters).build().createMergeTokens(map, dbImport);
    return filter(tokens);
}
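A call site in a MergeCase subclass passes a table name pattern, so only matching tables contribute merge tokens. A hypothetical usage; the table name is illustrative:

// Hypothetical call from a MergeCase subclass: diff only tables matching "ARTIST".
List<MergerToken> tokens = createMergeTokens("ARTIST");
for (MergerToken token : tokens) {
    // Each token represents one difference between the model and the database
    // (for example an added column or a missing table).
    System.out.println(token);
}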