Use of io.atlasmap.v2.DataSource in project atlasmap by atlasmap.
From the class DefaultAtlasContext, the init() method, which loads the mapping definition from the ADM archive and registers a source or target module for each declared DataSource:
/**
* TODO: For dynamic re-load. This needs lock()
*
* @throws AtlasException failed to initialize
*/
protected synchronized void init() throws AtlasException {
    if (this.initialized) {
        return;
    }
    registerJmx(this);
    if (this.atlasMappingUri != null) {
        this.admHandler = new ADMArchiveHandler(factory.getClassLoader());
        this.admHandler.setIgnoreLibrary(true);
        this.admHandler.load(Paths.get(this.atlasMappingUri));
        this.dataSourceMetadataMap = this.admHandler.getDataSourceMetadataMap();
    }
    if (this.admHandler == null || this.admHandler.getMappingDefinition() == null) {
        LOG.warn("AtlasMap context cannot initialize without mapping definition, ignoring: Mapping URI={}", this.atlasMappingUri);
        return;
    }
    sourceModules.clear();
    ConstantModule constant = new ConstantModule();
    constant.setConversionService(factory.getConversionService());
    constant.setFieldActionService(factory.getFieldActionService());
    sourceModules.put(AtlasConstants.CONSTANTS_DOCUMENT_ID, constant);
    PropertyModule property = new PropertyModule(factory.getPropertyStrategy());
    property.setConversionService(factory.getConversionService());
    property.setFieldActionService(factory.getFieldActionService());
    property.setMode(AtlasModuleMode.SOURCE);
    sourceModules.put(AtlasConstants.PROPERTIES_SOURCE_DOCUMENT_ID, property);
    targetModules.clear();
    property = new PropertyModule(factory.getPropertyStrategy());
    property.setConversionService(factory.getConversionService());
    property.setFieldActionService(factory.getFieldActionService());
    property.setMode(AtlasModuleMode.TARGET);
    targetModules.put(AtlasConstants.PROPERTIES_TARGET_DOCUMENT_ID, property);
    lookupTables.clear();
    if (admHandler.getMappingDefinition().getLookupTables() != null && admHandler.getMappingDefinition().getLookupTables().getLookupTable() != null) {
        for (LookupTable table : admHandler.getMappingDefinition().getLookupTables().getLookupTable()) {
            lookupTables.put(table.getName(), table);
        }
    }
    AtlasModuleInfoRegistry moduleInfoRegistry = factory.getModuleInfoRegistry();
    for (DataSource ds : admHandler.getMappingDefinition().getDataSource()) {
        AtlasModuleInfo moduleInfo = moduleInfoRegistry.lookupByUri(ds.getUri());
        if (moduleInfo == null) {
            LOG.error("Cannot find module info for the DataSource uri '{}'", ds.getUri());
            continue;
        }
        if (ds.getDataSourceType() != DataSourceType.SOURCE && ds.getDataSourceType() != DataSourceType.TARGET) {
            LOG.error("Unsupported DataSource type '{}'", ds.getDataSourceType());
            continue;
        }
        String docId = ds.getId();
        if (docId == null || docId.isEmpty()) {
            docId = ds.getDataSourceType() == DataSourceType.SOURCE ? AtlasConstants.DEFAULT_SOURCE_DOCUMENT_ID : AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID;
        }
        if (ds.getDataSourceType() == DataSourceType.SOURCE && sourceModules.containsKey(docId)) {
            LOG.error("Duplicated {} DataSource ID '{}' was detected, ignoring...", ds.getDataSourceType(), ds.getId());
            continue;
        }
        if (ds.getDataSourceType() == DataSourceType.TARGET && targetModules.containsKey(docId)) {
            LOG.error("Duplicated {} DataSource ID '{}' was detected, ignoring...", ds.getDataSourceType(), docId);
            continue;
        }
        try {
            AtlasModule module = moduleInfo.getModuleClass().getDeclaredConstructor().newInstance();
            module.setClassLoader(factory.getClassLoader());
            module.setConversionService(factory.getConversionService());
            module.setFieldActionService(factory.getFieldActionService());
            module.setDataSource(ds);
            if (ds.getDataSourceType() == DataSourceType.SOURCE) {
                getSourceModules().put(docId, module);
            } else if (ds.getDataSourceType() == DataSourceType.TARGET) {
                getTargetModules().put(docId, module);
            }
            if (this.dataSourceMetadataMap != null) {
                DataSourceKey dskey = new DataSourceKey(ds.getDataSourceType() == DataSourceType.SOURCE, docId);
                DataSourceMetadata meta = this.dataSourceMetadataMap.get(dskey);
                if (meta != null) {
                    module.setDataSourceMetadata(meta);
                }
            }
            module.init();
        } catch (Exception t) {
            LOG.error("Unable to initialize {} module: {}", ds.getDataSourceType(), moduleInfo);
            LOG.error(t.getMessage(), t);
            throw new AtlasException(String.format("Unable to initialize %s module: %s", ds.getDataSourceType(), moduleInfo.toString()), t);
        }
    }
    initialized = true;
}
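For reference, a minimal sketch (not taken from the AtlasMap sources) of the kind of DataSource entries that init() consumes. The document IDs and URIs are illustrative assumptions; when the ID is left empty, init() falls back to the default source or target document ID.

import io.atlasmap.v2.AtlasMapping;
import io.atlasmap.v2.AtlasModelFactory;
import io.atlasmap.v2.DataSource;
import io.atlasmap.v2.DataSourceType;

public class DataSourceSketch {
    public static AtlasMapping buildMapping() {
        AtlasMapping mapping = AtlasModelFactory.createAtlasMapping();
        mapping.setName("example-mapping");
        // Source side: the URI is what AtlasModuleInfoRegistry.lookupByUri() resolves to a module.
        DataSource source = new DataSource();
        source.setDataSourceType(DataSourceType.SOURCE);
        source.setUri("atlas:json:SomeSourceType");
        // Optional: without an explicit ID, init() uses the default source document ID.
        source.setId("my-source-doc");
        // Target side, same pattern.
        DataSource target = new DataSource();
        target.setDataSourceType(DataSourceType.TARGET);
        target.setUri("atlas:json:SomeTargetType");
        target.setId("my-target-doc");
        mapping.getDataSource().add(source);
        mapping.getDataSource().add(target);
        return mapping;
    }
}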
Use of io.atlasmap.v2.DataSource in project atlasmap by atlasmap.
From the class DefaultAtlasValidationService, the validateMapping() method, which validates the mapping name and the URI of every source and target DataSource:
@Override
public List<Validation> validateMapping(AtlasMapping mapping) {
    if (mapping == null) {
        throw new IllegalArgumentException("Mapping definition must not be null");
    }
    List<Validation> validations = new ArrayList<>();
    Validators.MAPPING_NAME.get().validate(mapping.getName(), validations, null);
    List<DataSource> dataSources = mapping.getDataSource();
    for (DataSource ds : dataSources) {
        switch (ds.getDataSourceType()) {
            case SOURCE:
                Validators.DATASOURCE_SOURCE_URI.get().validate(ds.getUri(), validations, ds.getId());
                break;
            case TARGET:
                Validators.DATASOURCE_TARGET_URI.get().validate(ds.getUri(), validations, ds.getId());
                break;
            default:
                throw new IllegalArgumentException(String.format("Unknown DataSource type '%s'", ds.getDataSourceType()));
        }
    }
    validateFieldMappings(mapping.getMappings(), mapping.getLookupTables(), validations);
    return validations;
}
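A minimal usage sketch (assumed wiring, not copied from the project's tests; it presumes the service's no-arg constructor): validate a mapping and print each reported issue.

import java.util.List;

import io.atlasmap.core.DefaultAtlasValidationService;
import io.atlasmap.v2.AtlasMapping;
import io.atlasmap.v2.Validation;

public class ValidateMappingSketch {
    public static void report(AtlasMapping mapping) {
        DefaultAtlasValidationService validationService = new DefaultAtlasValidationService();
        List<Validation> validations = validationService.validateMapping(mapping);
        for (Validation v : validations) {
            // Each Validation carries a status and a human-readable message.
            System.out.println(v.getStatus() + ": " + v.getMessage());
        }
    }
}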
Use of io.atlasmap.v2.DataSource in project atlasmap by atlasmap.
From the class AtlasBaseActionsTest, the runActionTestList() helper, which builds a one-field mapping carrying the given field actions, processes it through an AtlasContext, and asserts the expected target value:
public Object runActionTestList(List<Action> actions, String sourceFirstName, Object targetExpected, Class<?> targetClassExpected) throws Exception {
    System.out.println("Now running test for actions: " + actions);
    System.out.println("Input: " + sourceFirstName);
    System.out.println("Expected output: " + targetExpected);
    Mapping m = new Mapping();
    m.setMappingType(MappingType.MAP);
    m.getInputField().add(this.sourceField);
    m.getOutputField().add(this.targetField);
    if (actions != null) {
        m.getOutputField().get(0).setActions(new ArrayList<Action>());
        m.getOutputField().get(0).getActions().addAll(actions);
    }
    DataSource src = new DataSource();
    src.setDataSourceType(DataSourceType.SOURCE);
    src.setUri(this.docURI);
    DataSource tgt = new DataSource();
    tgt.setDataSourceType(DataSourceType.TARGET);
    tgt.setUri(this.docURI);
    AtlasMapping atlasMapping = new AtlasMapping();
    atlasMapping.setName("fieldactiontest");
    atlasMapping.setMappings(new Mappings());
    atlasMapping.getMappings().getMapping().add(m);
    atlasMapping.getDataSource().add(src);
    atlasMapping.getDataSource().add(tgt);
    String tmpFile = "target/fieldactions-" + this.getClass().getSimpleName() + "-tmp.txt";
    Json.mapper().writeValue(new File(tmpFile), atlasMapping);
    AtlasContext context = atlasContextFactory.createContext(new File(tmpFile).toURI());
    AtlasSession session = context.createSession();
    session.setDefaultSourceDocument(createSource(sourceFirstName));
    context.process(session);
    Object targetActual = session.getDefaultTargetDocument();
    assertNotNull(targetActual);
    targetActual = getTargetValue(targetActual, targetClassExpected);
    if (targetExpected != null) {
        assertEquals(targetExpected, targetActual);
    }
    return targetActual;
}
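As a standalone illustration of what runActionTestList assembles, here is a sketch of a single MAP mapping whose output field carries one field action. The field names and the choice of the Uppercase action are assumptions for the example, mirroring the mock-field style of the fixtures below.

import java.util.ArrayList;

import io.atlasmap.v2.Action;
import io.atlasmap.v2.AtlasModelFactory;
import io.atlasmap.v2.Mapping;
import io.atlasmap.v2.MappingType;
import io.atlasmap.v2.MockField;
import io.atlasmap.v2.Uppercase;

public class ActionMappingSketch {
    public static Mapping buildUppercaseMapping() {
        Mapping m = AtlasModelFactory.createMapping(MappingType.MAP);
        MockField in = AtlasModelFactory.createMockField();
        in.setName("firstName");
        MockField out = AtlasModelFactory.createMockField();
        out.setName("firstName");
        // Actions attached to the output field are applied as the value is written to the target.
        out.setActions(new ArrayList<Action>());
        out.getActions().add(new Uppercase());
        m.getInputField().add(in);
        m.getOutputField().add(out);
        return m;
    }
}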
Use of io.atlasmap.v2.DataSource in project atlasmap by atlasmap.
From the class BaseValidatorTest, the getAtlasMappingFullValid() fixture, which builds a mapping with valid source and target DataSources plus MAP and SEPARATE field mappings:
protected AtlasMapping getAtlasMappingFullValid() {
    AtlasMapping mapping = AtlasModelFactory.createAtlasMapping();
    mapping.setName("thisis_a_valid.name");
    DataSource src = new DataSource();
    src.setDataSourceType(DataSourceType.SOURCE);
    src.setUri("atlas:java?2");
    DataSource tgt = new DataSource();
    tgt.setDataSourceType(DataSourceType.TARGET);
    tgt.setUri("atlas:java?3");
    mapping.getDataSource().add(src);
    mapping.getDataSource().add(tgt);
    Mapping mapFieldMapping = AtlasModelFactory.createMapping(MappingType.MAP);
    MockField inputField = AtlasModelFactory.createMockField();
    inputField.setFieldType(FieldType.STRING);
    inputField.setCustom("java.lang.String");
    inputField.setName("inputName");
    mapFieldMapping.getInputField().add(inputField);
    MockField outputField = AtlasModelFactory.createMockField();
    outputField.setFieldType(FieldType.STRING);
    outputField.setCustom("java.lang.String");
    outputField.setName("outputName");
    mapFieldMapping.getOutputField().add(outputField);
    Mapping separateMapping = AtlasModelFactory.createMapping(MappingType.SEPARATE);
    MockField sIJavaField = AtlasModelFactory.createMockField();
    sIJavaField.setFieldType(FieldType.STRING);
    sIJavaField.setCustom("java.lang.String");
    sIJavaField.setName("inputName");
    separateMapping.getInputField().add(sIJavaField);
    MockField sOJavaField = AtlasModelFactory.createMockField();
    sOJavaField.setFieldType(FieldType.STRING);
    sOJavaField.setCustom("java.lang.String");
    sOJavaField.setName("outputName");
    sOJavaField.setIndex(0);
    separateMapping.getOutputField().add(sOJavaField);
    mapping.getMappings().getMapping().add(mapFieldMapping);
    mapping.getMappings().getMapping().add(separateMapping);
    return mapping;
}
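Tying this fixture back to the validation service shown earlier, a hedged sketch (the helper and the status check are assumptions, not copied from BaseValidatorTest) of how the fully valid mapping could be checked for ERROR-level validations:

import java.util.List;

import io.atlasmap.core.DefaultAtlasValidationService;
import io.atlasmap.v2.AtlasMapping;
import io.atlasmap.v2.Validation;
import io.atlasmap.v2.ValidationStatus;

public class FullValidMappingSketch {
    // Returns true when the supplied mapping (e.g. the one built by getAtlasMappingFullValid())
    // produces no ERROR-level validations.
    public static boolean hasNoErrors(AtlasMapping mapping) {
        List<Validation> validations = new DefaultAtlasValidationService().validateMapping(mapping);
        return validations.stream().noneMatch(v -> v.getStatus() == ValidationStatus.ERROR);
    }
}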
Use of io.atlasmap.v2.DataSource in project atlasmap by atlasmap.
From the class AtlasEndpointTest, the doConversionIfJsonDataSource() test, which exercises conversion when the source DataSource is JSON-typed:
@Test
public void doConversionIfJsonDataSource() throws Exception {
    final List<DataSource> dataSources = new ArrayList<>();
    final DataSource dataSource = new DataSource();
    dataSource.setDataSourceType(DataSourceType.SOURCE);
    dataSource.setUri("atlas:json:SomeType");
    dataSources.add(dataSource);
    perform(dataSources, null, null, true);
}
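A companion sketch (an assumption, not part of AtlasEndpointTest): the target side can be declared the same way when the JSON document sits on the output of the route. The document type "SomeType" mirrors the illustrative URI used in the test above.

import java.util.ArrayList;
import java.util.List;

import io.atlasmap.v2.DataSource;
import io.atlasmap.v2.DataSourceType;

public class JsonTargetDataSourceSketch {
    public static List<DataSource> jsonTarget() {
        List<DataSource> dataSources = new ArrayList<>();
        DataSource dataSource = new DataSource();
        dataSource.setDataSourceType(DataSourceType.TARGET);
        dataSource.setUri("atlas:json:SomeType");
        dataSources.add(dataSource);
        return dataSources;
    }
}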