Use of org.pentaho.di.core.plugins.PluginRegistry in the project pentaho-kettle (by Pentaho), in the class SlaveServerConfig, method openRepository.
/**
 * Opens a connection to the repository identified by {@code repositoryId}.
 *
 * <p>Reads the repository definitions, locates the requested one, loads the
 * matching repository plugin, connects with the configured credentials and,
 * when the repository exposes a meta store, registers it as the active
 * (highest-priority) meta store.</p>
 *
 * @param repositoryId the name/ID of the repository to connect to
 * @throws KettleException if the repository cannot be found or any step of the
 *                         connection process fails (the cause is preserved)
 */
private void openRepository(String repositoryId) throws KettleException {
  try {
    RepositoriesMeta allRepositories = new RepositoriesMeta();
    allRepositories.readData();

    repositoryMeta = allRepositories.findRepository(repositoryId);
    if (repositoryMeta == null) {
      throw new KettleException("Unable to find repository: " + repositoryId);
    }

    // Instantiate the repository implementation via its plugin and connect.
    repository = PluginRegistry.getInstance()
        .loadClass(RepositoryPluginType.class, repositoryMeta, Repository.class);
    repository.init(repositoryMeta);
    repository.connect(repositoryUsername, repositoryPassword);

    // Give the repository's meta store top priority and make it the active one.
    if (repository.getMetaStore() != null) {
      metaStore.addMetaStore(0, repository.getMetaStore());
      metaStore.setActiveMetaStoreName(repository.getMetaStore().getName());
    }

    LogChannel.GENERAL.logBasic("Connected to repository '" + repository.getName() + "'");
  } catch (Exception e) {
    throw new KettleException("Unable to open repository connection", e);
  }
}
Use of org.pentaho.di.core.plugins.PluginRegistry in the project pentaho-kettle (by Pentaho), in the class TransProfileFactory, method generateTransformation.
/**
 * Builds a profiling transformation for the configured table: a table-input
 * step reading {@code SELECT * FROM schemaTable}, a "group by" step (without
 * grouping) computing per-column statistics, and a dummy result step.
 *
 * <p>Statistics per column type: numeric columns get min, max, sum, count,
 * average and standard deviation; string, date and boolean columns get min,
 * max and count.</p>
 *
 * @param parentLoggingInterface logging parent used while reading the table layout
 * @return the generated transformation metadata
 * @throws KettleException if the table fields cannot be determined
 */
public TransMeta generateTransformation(LoggingObjectInterface parentLoggingInterface) throws KettleException {
  PluginRegistry registry = PluginRegistry.getInstance();

  // Get the list of fields from the table...
  tableLayout = getTableFields(parentLoggingInterface);

  // Now start building the transformation...
  TransMeta transMeta = new TransMeta(databaseMeta);
  transMeta.addDatabase(databaseMeta);

  // Create a step to read the content of the table.
  // For now we read it all; later we can add options to only read the first X rows.
  TableInputMeta readMeta = new TableInputMeta();
  readMeta.setSQL("SELECT * FROM " + schemaTable);
  readMeta.setDatabaseMeta(databaseMeta);
  StepMeta read = new StepMeta(registry.getPluginId(StepPluginType.class, readMeta), "Read data", readMeta);
  read.setLocation(50, 50);
  read.setDraw(true);
  transMeta.addStep(read);

  // Statistics requested per column type.
  // TODO: create configuration possibility.
  int[] numericCalculations = new int[] { GroupByMeta.TYPE_GROUP_MIN, GroupByMeta.TYPE_GROUP_MAX, GroupByMeta.TYPE_GROUP_SUM, GroupByMeta.TYPE_GROUP_COUNT_ALL, GroupByMeta.TYPE_GROUP_AVERAGE, GroupByMeta.TYPE_GROUP_STANDARD_DEVIATION };
  int[] stringCalculations = new int[] { GroupByMeta.TYPE_GROUP_MIN, GroupByMeta.TYPE_GROUP_MAX, GroupByMeta.TYPE_GROUP_COUNT_ALL };
  int[] dateCalculations = new int[] { GroupByMeta.TYPE_GROUP_MIN, GroupByMeta.TYPE_GROUP_MAX, GroupByMeta.TYPE_GROUP_COUNT_ALL };
  int[] booleanCalculations = new int[] { GroupByMeta.TYPE_GROUP_MIN, GroupByMeta.TYPE_GROUP_MAX, GroupByMeta.TYPE_GROUP_COUNT_ALL };

  // Count how many aggregate rows we need so the "group by" meta can be pre-allocated.
  int nrCalculations = 0;
  for (ValueMetaInterface valueMeta : tableLayout.getValueMetaList()) {
    if (valueMeta.isNumeric()) {
      nrCalculations += numericCalculations.length;
    }
    if (valueMeta.isDate()) {
      nrCalculations += dateCalculations.length;
    }
    if (valueMeta.isString()) {
      nrCalculations += stringCalculations.length;
    }
    if (valueMeta.isBoolean()) {
      nrCalculations += booleanCalculations.length;
    }
  }

  // Run it through the "group by" step without a grouping.
  // Later, we can use the UnivariateStats plugin/step perhaps.
  GroupByMeta statsMeta = new GroupByMeta();
  statsMeta.allocate(0, nrCalculations);

  // Fill in one aggregate entry per (column, calculation) pair, keeping the
  // original type order per column: numeric, string, date, boolean.
  int calcIndex = 0;
  for (int i = 0; i < tableLayout.size(); i++) {
    ValueMetaInterface valueMeta = tableLayout.getValueMeta(i);
    if (valueMeta.isNumeric()) {
      calcIndex = addAggregates(statsMeta, valueMeta, numericCalculations, calcIndex);
    }
    if (valueMeta.isString()) {
      calcIndex = addAggregates(statsMeta, valueMeta, stringCalculations, calcIndex);
    }
    if (valueMeta.isDate()) {
      calcIndex = addAggregates(statsMeta, valueMeta, dateCalculations, calcIndex);
    }
    if (valueMeta.isBoolean()) {
      calcIndex = addAggregates(statsMeta, valueMeta, booleanCalculations, calcIndex);
    }
  }

  StepMeta calc = new StepMeta(registry.getPluginId(StepPluginType.class, statsMeta), "Calc", statsMeta);
  calc.setLocation(250, 50);
  calc.setDraw(true);
  transMeta.addStep(calc);
  transMeta.addTransHop(new TransHopMeta(read, calc));

  // Terminal dummy step; callers locate it by RESULT_STEP_NAME to read the output.
  DummyTransMeta dummyMeta = new DummyTransMeta();
  StepMeta result = new StepMeta(registry.getPluginId(StepPluginType.class, dummyMeta), RESULT_STEP_NAME, dummyMeta);
  result.setLocation(450, 50);
  result.setDraw(true);
  transMeta.addStep(result);
  transMeta.addTransHop(new TransHopMeta(calc, result));

  return transMeta;
}

/**
 * Appends one aggregate entry per calculation for the given column to the
 * "group by" metadata, starting at {@code calcIndex}.
 *
 * @return the index just past the last entry written
 */
private static int addAggregates(GroupByMeta statsMeta, ValueMetaInterface valueMeta, int[] calculations, int calcIndex) {
  // CHECKSTYLE:LineLength:OFF
  for (int calculation : calculations) {
    statsMeta.getAggregateField()[calcIndex] = valueMeta.getName() + "(" + GroupByMeta.getTypeDesc(calculation) + ")";
    statsMeta.getSubjectField()[calcIndex] = valueMeta.getName();
    statsMeta.getAggregateType()[calcIndex] = calculation;
    calcIndex++;
  }
  return calcIndex;
}
Use of org.pentaho.di.core.plugins.PluginRegistry in the project pentaho-kettle (by Pentaho), in the class StepPartitioningMeta, method getMethod.
/**
 * Resolves a partitioning method identifier to its canonical method code.
 *
 * <p>The lookup order is: human-readable method description, built-in method
 * code, then registered partitioner plugin (by name, then by id) — in which
 * case the given name itself is the code. Anything unresolved (including an
 * empty name) maps to the "none" method.</p>
 *
 * @param name description, code, plugin name or plugin id (case-insensitive)
 * @return the matching method code, or the "none" code when nothing matches
 */
public static final String getMethod(String name) {
  if (Utils.isEmpty(name)) {
    return methodCodes[PARTITIONING_METHOD_NONE];
  }

  // Try the human-readable descriptions first; their index maps 1:1 to the codes.
  for (int index = 0; index < methodDescriptions.length; index++) {
    if (methodDescriptions[index].equalsIgnoreCase(name)) {
      return methodCodes[index];
    }
  }

  // Then try the codes themselves (normalizing case).
  for (int index = 0; index < methodCodes.length; index++) {
    if (methodCodes[index].equalsIgnoreCase(name)) {
      return methodCodes[index];
    }
  }

  // Finally, a registered partitioner plugin matching by name or id keeps its own name as code.
  PluginRegistry registry = PluginRegistry.getInstance();
  if (registry.findPluginWithName(PartitionerPluginType.class, name) != null
      || registry.findPluginWithId(PartitionerPluginType.class, name) != null) {
    return name;
  }

  return methodCodes[PARTITIONING_METHOD_NONE];
}
Use of org.pentaho.di.core.plugins.PluginRegistry in the project pentaho-kettle (by Pentaho), in the class DatabaseMeta, method getDatabaseFactory.
/**
 * Instantiates the database factory for this database type.
 *
 * <p>Looks up the database plugin matching this meta's plugin id, then loads
 * and instantiates the factory class named by
 * {@code databaseInterface.getDatabaseFactoryName()} using the plugin's own
 * class loader (the factory class may live inside the plugin jar).</p>
 *
 * @return a new factory instance for this database type
 * @throws KettleDatabaseException if no plugin is registered for the plugin id
 * @throws Exception if the factory class cannot be loaded or instantiated
 */
public DatabaseFactoryInterface getDatabaseFactory() throws Exception {
  PluginRegistry registry = PluginRegistry.getInstance();
  PluginInterface plugin = registry.getPlugin(DatabasePluginType.class, databaseInterface.getPluginId());
  if (plugin == null) {
    throw new KettleDatabaseException("database type with plugin id [" + databaseInterface.getPluginId() + "] couldn't be found!");
  }
  ClassLoader loader = registry.getClassLoader(plugin);
  Class<?> clazz = Class.forName(databaseInterface.getDatabaseFactoryName(), true, loader);
  // Class.newInstance() is deprecated (since Java 9): it bypasses compiler
  // exception checking by rethrowing the constructor's checked exceptions
  // directly. Use the no-arg constructor via reflection instead.
  return (DatabaseFactoryInterface) clazz.getDeclaredConstructor().newInstance();
}
Use of org.pentaho.di.core.plugins.PluginRegistry in the project pentaho-kettle (by Pentaho), in the class ExtensionPointMap, method reInitialize.
/**
 * Rebuilds the extension point plugins map from scratch.
 *
 * <p>Replaces the backing table with a fresh one and re-registers every
 * extension point plugin currently known to the plugin registry. The whole
 * operation runs under the write lock so readers never observe a
 * partially-populated map.</p>
 */
public void reInitialize() {
  lock.writeLock().lock();
  try {
    extensionPointPluginMap = HashBasedTable.create();
    PluginRegistry.getInstance()
        .getPlugins(ExtensionPointPluginType.class)
        .forEach(this::addExtensionPoint);
  } finally {
    lock.writeLock().unlock();
  }
}
Aggregations