Use of org.pentaho.di.core.plugins.PluginRegistry in project pentaho-kettle by pentaho.
The class PGBulkLoaderMetaTest, method setUp.
@Before
public void setUp() {
  TransMeta transMeta = new TransMeta();
  transMeta.setName("loader");
  lm = new PGBulkLoaderMeta();
  ld = new PGBulkLoaderData();
  PluginRegistry plugReg = PluginRegistry.getInstance();
  // Resolve the step plugin id that PGBulkLoaderMeta is registered under.
  String loaderPid = plugReg.getPluginId(StepPluginType.class, lm);
  stepMeta = new StepMeta(loaderPid, "loader", lm);
  Trans trans = new Trans(transMeta);
  transMeta.addStep(stepMeta);
  loader = new PGBulkLoader(stepMeta, ld, 1, transMeta, trans);
}
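For getPluginId to return a non-null id here, the Kettle environment (and with it the plugin registry) has to be initialized before this setUp runs; the original test presumably does that elsewhere. A minimal sketch, assuming JUnit 4 (the method name initKettle is illustrative only):

@BeforeClass
public static void initKettle() throws KettleException {
  // Populates the PluginRegistry so that getPluginId(StepPluginType.class, lm) can resolve PGBulkLoaderMeta.
  KettleEnvironment.init(false);
}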
Use of org.pentaho.di.core.plugins.PluginRegistry in project pentaho-kettle by pentaho.
The class WebServiceIT, method testProcessRow.
public void testProcessRow() throws Exception {
  KettleEnvironment.init();
  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("WebServiceTest");
  PluginRegistry registry = PluginRegistry.getInstance();
  //
  // Create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();
  // Set the information of the injector.
  String injectorPid = registry.getPluginId(StepPluginType.class, im);
  StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
  transMeta.addStep(injectorStep);
  //
  // Create a dummy step 1
  //
  String dummyStepname1 = "dummy step 1";
  DummyTransMeta dm1 = new DummyTransMeta();
  String dummyPid1 = registry.getPluginId(StepPluginType.class, dm1);
  StepMeta dummyStep1 = new StepMeta(dummyPid1, dummyStepname1, dm1);
  transMeta.addStep(dummyStep1);
  TransHopMeta hi = new TransHopMeta(injectorStep, dummyStep1);
  transMeta.addTransHop(hi);
  //
  // Create a Web Service step
  //
  String webServiceStepname = "web service step";
  WebServiceMeta scm = new WebServiceMeta();
  // scm.setUrl(HTTP_LOCALHOST_9998 + "wsdl");
  // scm.setOperationName("CelciusToFahrenheit");
  DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
  DocumentBuilder db = dbf.newDocumentBuilder();
  Document doc = db.parse(new InputSource(new java.io.StringReader(STEP_META)));
  scm.loadXML(doc.getFirstChild(), null, (IMetaStore) null);
  String webServicePid = registry.getPluginId(StepPluginType.class, scm);
  StepMeta webServiceStep = new StepMeta(webServicePid, webServiceStepname, scm);
  transMeta.addStep(webServiceStep);
  TransHopMeta hi2 = new TransHopMeta(dummyStep1, webServiceStep);
  transMeta.addTransHop(hi2);
  //
  // Create a dummy step 2
  //
  String dummyStepname2 = "dummy step 2";
  DummyTransMeta dm2 = new DummyTransMeta();
  String dummyPid2 = registry.getPluginId(StepPluginType.class, dm2);
  StepMeta dummyStep2 = new StepMeta(dummyPid2, dummyStepname2, dm2);
  transMeta.addStep(dummyStep2);
  TransHopMeta hi3 = new TransHopMeta(webServiceStep, dummyStep2);
  transMeta.addTransHop(hi3);
  // Now execute the transformation...
  Trans trans = new Trans(transMeta);
  trans.prepareExecution(null);
  StepInterface si = trans.getStepInterface(dummyStepname1, 0);
  RowStepCollector dummyRc1 = new RowStepCollector();
  si.addRowListener(dummyRc1);
  si = trans.getStepInterface(webServiceStepname, 0);
  RowStepCollector webServiceRc = new RowStepCollector();
  si.addRowListener(webServiceRc);
  RowProducer rp = trans.addRowProducer(injectorStepname, 0);
  trans.startThreads();
  // Add the input rows.
  List<RowMetaAndData> inputList = createData(createRowMetaInterface(), new Object[][] { new Object[] { 10 } });
  for (RowMetaAndData rm : inputList) {
    rp.putRow(rm.getRowMeta(), rm.getData());
  }
  rp.finished();
  trans.waitUntilFinished();
  // Compare the rows written by the web service step against the expected output.
  List<RowMetaAndData> goldRows = createData(createOutputRowMetaInterface(), new Object[][] { new Object[] { 10, new BigDecimal(20) } });
  List<RowMetaAndData> resultRows2 = webServiceRc.getRowsWritten();
  assertEquals(goldRows, resultRows2);
}
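The test above relies on local helpers (createRowMetaInterface, createOutputRowMetaInterface, createData) whose bodies are not shown here. A minimal sketch of what createRowMetaInterface and createData typically look like in Kettle integration tests; the field name "value" and the exact types are assumptions, not the test's actual definitions:

private static RowMetaInterface createRowMetaInterface() {
  // One integer input field; Kettle represents TYPE_INTEGER values as Long internally.
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaInteger("value"));
  return rowMeta;
}

private static List<RowMetaAndData> createData(RowMetaInterface rowMeta, Object[][] rows) {
  // Wrap each raw Object[] row together with its row metadata.
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
  for (Object[] row : rows) {
    list.add(new RowMetaAndData(rowMeta, row));
  }
  return list;
}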
Use of org.pentaho.di.core.plugins.PluginRegistry in project pentaho-kettle by pentaho.
The class KettleDatabaseRepositoryStepDelegate, method loadStepMeta.
/**
 * Create a new step by loading its metadata from the repository.
 *
 * @param stepId
 *          the object id of the step to load
 * @param databases
 *          the list of defined databases
 * @param partitionSchemas
 *          the list of defined partition schemas
 * @return the loaded StepMeta
 * @throws KettleException
 *           if the step cannot be found or loaded
 */
public StepMeta loadStepMeta(ObjectId stepId, List<DatabaseMeta> databases, List<PartitionSchema> partitionSchemas) throws KettleException {
  StepMeta stepMeta = new StepMeta();
  PluginRegistry registry = PluginRegistry.getInstance();
  try {
    RowMetaAndData r = getStep(stepId);
    if (r != null) {
      stepMeta.setObjectId(stepId);
      stepMeta.setName(r.getString(KettleDatabaseRepository.FIELD_STEP_NAME, null));
      stepMeta.setDescription(r.getString(KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, null));
      long id_step_type = r.getInteger(KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, -1L);
      RowMetaAndData steptyperow = getStepType(new LongObjectId(id_step_type));
      stepMeta.setStepID(steptyperow.getString(KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, null));
      stepMeta.setDistributes(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, true));
      int copies = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_COPIES, 1);
      String copiesString = r.getString(KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, null);
      if (!Utils.isEmpty(copiesString)) {
        stepMeta.setCopiesString(copiesString);
      } else {
        stepMeta.setCopies(copies);
      }
      int x = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 0);
      int y = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 0);
      stepMeta.setLocation(new Point(x, y));
      stepMeta.setDraw(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, false));
      // Generate the appropriate class...
      PluginInterface sp = registry.findPluginWithId(StepPluginType.class, stepMeta.getStepID());
      if (sp == null) {
        stepMeta.setStepMetaInterface(new MissingTrans(stepMeta.getName(), stepMeta.getStepID()));
      } else {
        stepMeta.setStepMetaInterface((StepMetaInterface) registry.loadClass(sp));
      }
      if (stepMeta.getStepMetaInterface() != null) {
        // Read the step info from the repository!
        readRepCompatibleStepMeta(stepMeta.getStepMetaInterface(), repository, stepMeta.getObjectId(), databases);
        stepMeta.getStepMetaInterface().readRep(repository, repository.metaStore, stepMeta.getObjectId(), databases);
      }
      // Get the partitioning as well...
      //
      stepMeta.setStepPartitioningMeta(loadStepPartitioningMeta(stepMeta.getObjectId()));
      stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(partitionSchemas);
      // Get the cluster schema name
      //
      stepMeta.setClusterSchemaName(repository.getStepAttributeString(stepId, "cluster_schema"));
      // Are we using a custom row distribution plugin?
      //
      String rowDistributionCode = repository.getStepAttributeString(stepId, 0, "row_distribution_code");
      RowDistributionInterface rowDistribution = PluginRegistry.getInstance().loadClass(RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class);
      stepMeta.setRowDistribution(rowDistribution);
      // Load the attribute groups map
      //
      stepMeta.setAttributesMap(loadStepAttributesMap(stepId));
      return stepMeta;
    } else {
      throw new KettleException(BaseMessages.getString(PKG, "StepMeta.Exception.StepInfoCouldNotBeFound", String.valueOf(stepId)));
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "StepMeta.Exception.StepCouldNotBeLoaded", String.valueOf(stepMeta.getObjectId())), dbe);
  }
}
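The part of this method that touches PluginRegistry is the lookup of the step's plugin by its type code, with a fallback to a placeholder when the plugin is not installed. Reduced to its essentials (a sketch with the repository plumbing stripped away; stepId and stepName stand in for the values read above):

PluginRegistry registry = PluginRegistry.getInstance();
PluginInterface plugin = registry.findPluginWithId(StepPluginType.class, stepId);
StepMetaInterface smi;
if (plugin == null) {
  // Unknown step id: keep a MissingTrans placeholder so the transformation still loads.
  smi = new MissingTrans(stepName, stepId);
} else {
  smi = (StepMetaInterface) registry.loadClass(plugin);
}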
Use of org.pentaho.di.core.plugins.PluginRegistry in project pentaho-kettle by pentaho.
The class KettleDatabaseRepositoryJobEntryDelegate, method loadJobEntryCopy.
/**
 * Load the graphical job entry copy from the repository. If the underlying job entry is not yet in the supplied
 * list, it is loaded as well; when no plugin can be found for its type code, a MissingEntry placeholder is used.
 *
 * @param jobId
 *          the job ID
 * @param jobEntryCopyId
 *          the job entry copy ID
 * @param jobentries
 *          a list with all job entries loaded so far
 * @param databases
 *          a list with all defined databases
 * @param slaveServers
 *          a list with all defined slave servers
 * @param jobname
 *          the name of the job, used when a placeholder entry has to be created
 * @return the loaded JobEntryCopy
 * @throws KettleException
 *           if the job entry copy cannot be found or loaded
 */
public JobEntryCopy loadJobEntryCopy(ObjectId jobId, ObjectId jobEntryCopyId, List<JobEntryInterface> jobentries, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, String jobname) throws KettleException {
  JobEntryCopy jobEntryCopy = new JobEntryCopy();
  try {
    jobEntryCopy.setObjectId(jobEntryCopyId);
    // Handle GUI information: nr, location, ...
    RowMetaAndData r = getJobEntryCopy(jobEntryCopyId);
    if (r != null) {
      // These are the jobentry_copy fields...
      //
      ObjectId jobEntryId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, 0));
      ObjectId jobEntryTypeId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, 0));
      jobEntryCopy.setNr((int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 0));
      int locx = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 0);
      int locy = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 0);
      boolean isdrawn = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, false);
      boolean isparallel = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, false);
      // Do we have the jobentry already?
      //
      jobEntryCopy.setEntry(JobMeta.findJobEntry(jobentries, jobEntryId));
      if (jobEntryCopy.getEntry() == null) {
        // What type of jobentry do we load now?
        // Get the jobentry type code
        //
        RowMetaAndData rt = getJobEntryType(new LongObjectId(jobEntryTypeId));
        if (rt != null) {
          String jet_code = rt.getString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, null);
          JobEntryInterface jobEntry = null;
          PluginRegistry registry = PluginRegistry.getInstance();
          PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, jet_code);
          if (jobPlugin == null) {
            jobEntry = new MissingEntry(jobname, jet_code);
          } else {
            jobEntry = (JobEntryInterface) registry.loadClass(jobPlugin);
          }
          if (jobEntry != null) {
            jobEntryCopy.setEntry(jobEntry);
            if (jobEntry instanceof JobEntryBase) {
              loadJobEntryBase((JobEntryBase) jobEntry, jobEntryId, databases, slaveServers);
              ((JobEntryBase) jobEntry).setAttributesMap(loadJobEntryAttributesMap(jobId, jobEntryId));
            }
            compatibleJobEntryLoadRep(jobEntry, repository, jobEntryTypeId, databases, slaveServers);
            jobEntry.loadRep(repository, repository.metaStore, jobEntryId, databases, slaveServers);
            jobEntryCopy.getEntry().setObjectId(jobEntryId);
            jobentries.add(jobEntryCopy.getEntry());
          } else {
            throw new KettleException("JobEntryLoader was unable to find Job Entry Plugin with description [" + jet_code + "].");
          }
        } else {
          throw new KettleException("Unable to find Job Entry Type with id=" + jobEntryTypeId + " in the repository");
        }
      }
      jobEntryCopy.setLocation(locx, locy);
      jobEntryCopy.setDrawn(isdrawn);
      jobEntryCopy.setLaunchingInParallel(isparallel);
      return jobEntryCopy;
    } else {
      throw new KettleException("Unable to find job entry copy in repository with id_jobentry_copy=" + jobEntryCopyId);
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load job entry copy from repository with id_jobentry_copy=" + jobEntryCopyId, dbe);
  }
}
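When findPluginWithId returns null here and a MissingEntry placeholder is created, it can help to see which job entry ids the registry actually knows about. A small diagnostic sketch, not part of the delegate itself:

PluginRegistry registry = PluginRegistry.getInstance();
for (PluginInterface plugin : registry.getPlugins(JobEntryPluginType.class)) {
  // Each plugin can be registered under one or more ids.
  System.out.println(plugin.getIds()[0] + " : " + plugin.getName());
}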
Use of org.pentaho.di.core.plugins.PluginRegistry in project pentaho-kettle by pentaho.
The class JobEntryEvalTableContentTest, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  KettleClientEnvironment.init();
  // dbMap is the class-level class map handed to the mock plugin below via getClassMap();
  // it maps requested classes to implementation class names.
  dbMap.put(DatabaseInterface.class, DBMockIface.class.getName());
  dbMap.put(InfobrightDatabaseMeta.class, InfobrightDatabaseMeta.class.getName());
  PluginRegistry preg = PluginRegistry.getInstance();
  // Register a mocked database plugin so that DatabaseInterface lookups resolve to DBMockIface.
  PluginInterface mockDbPlugin = mock(PluginInterface.class);
  when(mockDbPlugin.matches(anyString())).thenReturn(true);
  when(mockDbPlugin.isNativePlugin()).thenReturn(true);
  when(mockDbPlugin.getMainType()).thenAnswer((Answer<Class<?>>) invocation -> DatabaseInterface.class);
  when(mockDbPlugin.getPluginType()).thenAnswer((Answer<Class<? extends PluginTypeInterface>>) invocation -> DatabasePluginType.class);
  when(mockDbPlugin.getIds()).thenReturn(new String[] { "Oracle", "mock-db-id" });
  when(mockDbPlugin.getName()).thenReturn("mock-db-name");
  when(mockDbPlugin.getClassMap()).thenReturn(dbMap);
  preg.registerPlugin(DatabasePluginType.class, mockDbPlugin);
}
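Because matches(anyString()) is stubbed to return true, id-based lookups against the registry will hit this mock. A hedged sanity-check sketch of what a test could assert right after the registration (not part of the original setup):

PluginInterface found = PluginRegistry.getInstance().findPluginWithId(DatabasePluginType.class, "mock-db-id");
assertNotNull(found);
assertEquals("mock-db-name", found.getName());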