Use of com.bakdata.conquery.models.jobs.JobManager in project conquery by bakdata.
The class ManagerNode, method run:
public void run(ConqueryConfig config, Environment environment) throws InterruptedException {
    this.environment = environment;
    validator = environment.getValidator();

    client = new JerseyClientBuilder(environment).using(config.getJerseyClient()).build(getName());

    // Instantiate DatasetRegistry and MetaStorage so they are ready for injection into the object mapper (API + Storage)
    // The validator is already injected at this point, see Conquery.java
    datasetRegistry = new DatasetRegistry(config.getCluster().getEntityBucketSize());
    storage = new MetaStorage(datasetRegistry);

    datasetRegistry.injectInto(environment.getObjectMapper());
    storage.injectInto(environment.getObjectMapper());

    jobManager = new JobManager("ManagerNode", config.isFailOnError());
    formScanner = new FormScanner();
    this.config = config;
    config.initialize(this);

    // Initialization of internationalization
    I18n.init();

    RESTServer.configure(config, environment.jersey().getResourceConfig());

    maintenanceService = environment.lifecycle().scheduledExecutorService("Maintenance Service").build();

    environment.lifecycle().manage(this);

    loadNamespaces();
    loadMetaStorage();

    authController = new AuthorizationController(storage, config.getAuthorizationRealms());
    environment.lifecycle().manage(authController);

    unprotectedAuthAdmin = AuthServlet.generalSetup(environment.metrics(), config, environment.admin(), environment.getObjectMapper());
    unprotectedAuthApi = AuthServlet.generalSetup(environment.metrics(), config, environment.servlets(), environment.getObjectMapper());

    // Create AdminServlet first to make it available to the realms
    admin = new AdminServlet(this);

    authController.externalInit(this, config.getAuthenticationRealms());

    // Register default components for the admin interface
    admin.register(this);

    log.info("Registering ResourcesProvider");
    for (Class<? extends ResourcesProvider> resourceProvider : CPSTypeIdResolver.listImplementations(ResourcesProvider.class)) {
        try {
            ResourcesProvider provider = resourceProvider.getConstructor().newInstance();
            provider.registerResources(this);
            providers.add(provider);
        }
        catch (Exception e) {
            log.error("Failed to register Resource {}", resourceProvider, e);
        }
    }

    try {
        formScanner.execute(null, null);
    }
    catch (Exception e) {
        Throwables.throwIfUnchecked(e);
        throw new RuntimeException(e);
    }

    environment.admin().addTask(formScanner);
    environment.admin().addTask(new QueryCleanupTask(storage, Duration.of(
            config.getQueries().getOldQueriesTime().getQuantity(),
            config.getQueries().getOldQueriesTime().getUnit().toChronoUnit())));
    environment.admin().addTask(new PermissionCleanupTask(storage));
    environment.admin().addTask(new ClearFilterSourceSearch());
    environment.admin().addTask(new ReportConsistencyTask(datasetRegistry));

    ShutdownTask shutdown = new ShutdownTask();
    environment.admin().addTask(shutdown);
    environment.lifecycle().addServerLifecycleListener(shutdown);
}
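The tail of run registers several Dropwizard admin tasks (FormScanner, QueryCleanupTask, PermissionCleanupTask, and so on) via environment.admin().addTask(...). For readers unfamiliar with that mechanism, the following is a minimal sketch of a Dropwizard admin task, assuming the Dropwizard 2.x Task signature; the class name, task name, and body are illustrative and are not part of conquery.

import io.dropwizard.servlets.tasks.Task;

import java.io.PrintWriter;
import java.util.List;
import java.util.Map;

// Illustrative admin task (not conquery code): once registered with
// environment.admin().addTask(new ExampleCleanupTask()), it can be triggered
// by an HTTP POST to /tasks/example-cleanup on the admin connector.
public class ExampleCleanupTask extends Task {

    public ExampleCleanupTask() {
        super("example-cleanup");
    }

    @Override
    public void execute(Map<String, List<String>> parameters, PrintWriter output) throws Exception {
        // Place cleanup work here; parameters carry the query parameters of the POST request.
        output.println("example-cleanup executed");
    }
}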
Use of com.bakdata.conquery.models.jobs.JobManager in project conquery by bakdata.
The class ShardNode, method run:
@Override
protected void run(Environment environment, Namespace namespace, ConqueryConfig config) throws Exception {
    this.environment = environment;

    connector = new NioSocketConnector();
    jobManager = new JobManager(getName(), config.isFailOnError());

    synchronized (environment) {
        environment.lifecycle().manage(this);
        validator = environment.getValidator();
        scheduler = environment.lifecycle().scheduledExecutorService("Scheduled Messages").build();
    }

    this.config = config;
    config.initialize(this);

    // Periodically report the job manager's status, starting after 30 seconds.
    scheduler.scheduleAtFixedRate(this::reportJobManagerStatus, 30, 1, TimeUnit.SECONDS);

    // Configure the binary object mapper and make the environment's Validator available for injection.
    final ObjectMapper binaryMapper = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER));
    ((MutableInjectableValues) binaryMapper.getInjectableValues()).add(Validator.class, environment.getValidator());

    workers = new Workers(getConfig().getQueries().getExecutionPool(), binaryMapper, getConfig().getCluster().getEntityBucketSize());

    // Create one worker per persisted worker storage.
    final Collection<WorkerStorage> workerStorages = config.getStorage().loadWorkerStorages();
    for (WorkerStorage workerStorage : workerStorages) {
        workers.createWorker(workerStorage, config.isFailOnError());
    }

    log.info("All Worker Storages loaded: {}", workers.getWorkers().size());
}
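The binaryMapper setup above relies on conquery's MutableInjectableValues to expose the Validator to deserializers. As a rough equivalent in plain Jackson, the same idea can be expressed with InjectableValues.Std; the sketch below illustrates the general mechanism only (it is not the conquery implementation) and assumes the javax.validation API.

import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

import javax.validation.Validation;
import javax.validation.Validator;

// Sketch of Jackson value injection (plain Jackson, not conquery code):
// the Validator is registered under its class as key, so fields annotated
// with @JacksonInject can receive it during deserialization.
public class InjectableValuesSketch {

    public static ObjectMapper buildMapper() {
        ObjectMapper mapper = new ObjectMapper();
        Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
        mapper.setInjectableValues(new InjectableValues.Std().addValue(Validator.class, validator));
        return mapper;
    }
}

Conquery's MutableInjectableValues serves the same role; as the cast in ShardNode shows, it additionally allows values to be added after the mapper has already been configured.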