Use of org.apache.flink.util.DynamicCodeLoadingException in project flink by apache.
The class AbstractStateBackend, method loadStateBackendFromConfig.
// ------------------------------------------------------------------------
// Loading the state backend from a configuration
// ------------------------------------------------------------------------
/**
* Loads the state backend from the configuration, from the parameter 'state.backend', as defined
* in {@link CoreOptions#STATE_BACKEND}.
*
* <p>The state backends can be specified either via their shortcut name, or via the class name
* of a {@link StateBackendFactory}. If a StateBackendFactory class name is specified, the factory
* is instantiated (via its zero-argument constructor) and its
* {@link StateBackendFactory#createFromConfig(Configuration)} method is called.
*
* <p>Recognized shortcut names are '{@value AbstractStateBackend#MEMORY_STATE_BACKEND_NAME}',
* '{@value AbstractStateBackend#FS_STATE_BACKEND_NAME}', and
* '{@value AbstractStateBackend#ROCKSDB_STATE_BACKEND_NAME}'.
*
* @param config The configuration to load the state backend from
* @param classLoader The class loader that should be used to load the state backend
* @param logger Optionally, a logger to log actions to (may be null)
*
* @return The instantiated state backend.
*
* @throws DynamicCodeLoadingException
* Thrown if a state backend factory is configured and the factory class was not
* found or the factory could not be instantiated
* @throws IllegalConfigurationException
* May be thrown by the StateBackendFactory when creating / configuring the state
* backend in the factory
* @throws IOException
* May be thrown by the StateBackendFactory when instantiating the state backend
*/
public static StateBackend loadStateBackendFromConfig(
        Configuration config, ClassLoader classLoader, @Nullable Logger logger)
        throws IllegalConfigurationException, DynamicCodeLoadingException, IOException {
checkNotNull(config, "config");
checkNotNull(classLoader, "classLoader");
final String backendName = config.getString(CoreOptions.STATE_BACKEND);
if (backendName == null) {
return null;
}
// by default the factory class is the backend name
String factoryClassName = backendName;
switch(backendName.toLowerCase()) {
case MEMORY_STATE_BACKEND_NAME:
if (logger != null) {
logger.info("State backend is set to heap memory (checkpoint to JobManager)");
}
return new MemoryStateBackend();
case FS_STATE_BACKEND_NAME:
FsStateBackend fsBackend = new FsStateBackendFactory().createFromConfig(config);
if (logger != null) {
logger.info("State backend is set to heap memory (checkpoints to filesystem \"{}\")", fsBackend.getBasePath());
}
return fsBackend;
case ROCKSDB_STATE_BACKEND_NAME:
factoryClassName = "org.apache.flink.contrib.streaming.state.RocksDBStateBackendFactory";
default:
if (logger != null) {
logger.info("Loading state backend via factory {}", factoryClassName);
}
StateBackendFactory<?> factory;
try {
@SuppressWarnings("rawtypes") Class<? extends StateBackendFactory> clazz = Class.forName(factoryClassName, false, classLoader).asSubclass(StateBackendFactory.class);
factory = clazz.newInstance();
} catch (ClassNotFoundException e) {
throw new DynamicCodeLoadingException("Cannot find configured state backend factory class: " + backendName, e);
} catch (ClassCastException | InstantiationException | IllegalAccessException e) {
throw new DynamicCodeLoadingException("The class configured under '" + CoreOptions.STATE_BACKEND.key() + "' is not a valid state backend factory (" + backendName + ')', e);
}
return factory.createFromConfig(config);
}
}
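Below is a minimal usage sketch, not taken from the Flink sources, of the loader above. It selects the memory backend through its shortcut name, so no further keys are required; the literal 'jobmanager' and the use of the context class loader are assumptions made for illustration.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.runtime.state.AbstractStateBackend;
import org.apache.flink.runtime.state.StateBackend;

class BackendLoadingSketch {
    static StateBackend loadBackendFromShortcut() throws Exception {
        Configuration config = new Configuration();
        // 'jobmanager' is assumed to be the shortcut for the MemoryStateBackend in this
        // version; no further configuration keys are needed for that backend
        config.setString(CoreOptions.STATE_BACKEND.key(), "jobmanager");
        return AbstractStateBackend.loadStateBackendFromConfig(
                config, Thread.currentThread().getContextClassLoader(), null);
    }
}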
Use of org.apache.flink.util.DynamicCodeLoadingException in project flink by apache.
The class ExecutionGraphBuilder, method buildGraph.
/**
* Builds the ExecutionGraph from the JobGraph.
* If a prior execution graph exists, the JobGraph will be attached to it. If no prior execution
* graph exists, the JobGraph will be attached to a new, empty execution graph.
*/
public static ExecutionGraph buildGraph(
        @Nullable ExecutionGraph prior,
        JobGraph jobGraph,
        Configuration jobManagerConfig,
        ScheduledExecutorService futureExecutor,
        Executor ioExecutor,
        SlotProvider slotProvider,
        ClassLoader classLoader,
        CheckpointRecoveryFactory recoveryFactory,
        Time timeout,
        RestartStrategy restartStrategy,
        MetricGroup metrics,
        int parallelismForAutoMax,
        Logger log) throws JobExecutionException, JobException {
checkNotNull(jobGraph, "job graph cannot be null");
final String jobName = jobGraph.getName();
final JobID jobId = jobGraph.getJobID();
// create a new execution graph, if none exists so far
final ExecutionGraph executionGraph;
try {
executionGraph = (prior != null) ? prior : new ExecutionGraph(
        futureExecutor,
        ioExecutor,
        jobId,
        jobName,
        jobGraph.getJobConfiguration(),
        jobGraph.getSerializedExecutionConfig(),
        timeout,
        restartStrategy,
        jobGraph.getUserJarBlobKeys(),
        jobGraph.getClasspaths(),
        slotProvider,
        classLoader,
        metrics);
} catch (IOException e) {
throw new JobException("Could not create the execution graph.", e);
}
// set the basic properties
executionGraph.setScheduleMode(jobGraph.getScheduleMode());
executionGraph.setQueuedSchedulingAllowed(jobGraph.getAllowQueuedScheduling());
try {
executionGraph.setJsonPlan(JsonPlanGenerator.generatePlan(jobGraph));
} catch (Throwable t) {
log.warn("Cannot create JSON plan for job", t);
// give the graph an empty plan
executionGraph.setJsonPlan("{}");
}
// initialize the vertices that have a master initialization hook
// file output formats create directories here, input formats create splits
final long initMasterStart = System.nanoTime();
log.info("Running initialization on master for job {} ({}).", jobName, jobId);
for (JobVertex vertex : jobGraph.getVertices()) {
String executableClass = vertex.getInvokableClassName();
if (executableClass == null || executableClass.isEmpty()) {
throw new JobSubmissionException(jobId, "The vertex " + vertex.getID() + " (" + vertex.getName() + ") has no invokable class.");
}
if (vertex.getParallelism() == ExecutionConfig.PARALLELISM_AUTO_MAX) {
vertex.setParallelism(parallelismForAutoMax);
}
try {
vertex.initializeOnMaster(classLoader);
} catch (Throwable t) {
throw new JobExecutionException(jobId, "Cannot initialize task '" + vertex.getName() + "': " + t.getMessage(), t);
}
}
log.info("Successfully ran initialization on master in {} ms.", (System.nanoTime() - initMasterStart) / 1_000_000);
// topologically sort the job vertices and attach the graph to the existing one
List<JobVertex> sortedTopology = jobGraph.getVerticesSortedTopologicallyFromSources();
if (log.isDebugEnabled()) {
log.debug("Adding {} vertices from job graph {} ({}).", sortedTopology.size(), jobName, jobId);
}
executionGraph.attachJobGraph(sortedTopology);
if (log.isDebugEnabled()) {
log.debug("Successfully created execution graph from job graph {} ({}).", jobName, jobId);
}
// configure the state checkpointing
JobSnapshottingSettings snapshotSettings = jobGraph.getSnapshotSettings();
if (snapshotSettings != null) {
List<ExecutionJobVertex> triggerVertices = idToVertex(snapshotSettings.getVerticesToTrigger(), executionGraph);
List<ExecutionJobVertex> ackVertices = idToVertex(snapshotSettings.getVerticesToAcknowledge(), executionGraph);
List<ExecutionJobVertex> confirmVertices = idToVertex(snapshotSettings.getVerticesToConfirm(), executionGraph);
CompletedCheckpointStore completedCheckpoints;
CheckpointIDCounter checkpointIdCounter;
try {
int maxNumberOfCheckpointsToRetain = jobManagerConfig.getInteger(CoreOptions.MAX_RETAINED_CHECKPOINTS);
if (maxNumberOfCheckpointsToRetain <= 0) {
// warn and fall back to the default value if the setting in
// state.checkpoints.max-retained-checkpoints is not greater than 0
log.warn("The setting for '{} : {}' is invalid. Using default value of {}", CoreOptions.MAX_RETAINED_CHECKPOINTS.key(), maxNumberOfCheckpointsToRetain, CoreOptions.MAX_RETAINED_CHECKPOINTS.defaultValue());
maxNumberOfCheckpointsToRetain = CoreOptions.MAX_RETAINED_CHECKPOINTS.defaultValue();
}
completedCheckpoints = recoveryFactory.createCheckpointStore(jobId, maxNumberOfCheckpointsToRetain, classLoader);
checkpointIdCounter = recoveryFactory.createCheckpointIDCounter(jobId);
} catch (Exception e) {
throw new JobExecutionException(jobId, "Failed to initialize high-availability checkpoint handler", e);
}
// Maximum number of remembered checkpoints
int historySize = jobManagerConfig.getInteger(ConfigConstants.JOB_MANAGER_WEB_CHECKPOINTS_HISTORY_SIZE, ConfigConstants.DEFAULT_JOB_MANAGER_WEB_CHECKPOINTS_HISTORY_SIZE);
CheckpointStatsTracker checkpointStatsTracker = new CheckpointStatsTracker(historySize, ackVertices, snapshotSettings, metrics);
// The default directory for externalized checkpoints
String externalizedCheckpointsDir = jobManagerConfig.getString(ConfigConstants.CHECKPOINTS_DIRECTORY_KEY, null);
// load the state backend for checkpoint metadata.
// if specified in the application, use that; otherwise load it from the configuration
final StateBackend metadataBackend;
final StateBackend applicationConfiguredBackend = snapshotSettings.getDefaultStateBackend();
if (applicationConfiguredBackend != null) {
metadataBackend = applicationConfiguredBackend;
log.info("Using application-defined state backend for checkpoint/savepoint metadata: {}.", applicationConfiguredBackend);
} else {
try {
metadataBackend = AbstractStateBackend.loadStateBackendFromConfigOrCreateDefault(jobManagerConfig, classLoader, log);
} catch (IllegalConfigurationException | IOException | DynamicCodeLoadingException e) {
throw new JobExecutionException(jobId, "Could not instantiate configured state backend", e);
}
}
executionGraph.enableCheckpointing(
        snapshotSettings.getCheckpointInterval(),
        snapshotSettings.getCheckpointTimeout(),
        snapshotSettings.getMinPauseBetweenCheckpoints(),
        snapshotSettings.getMaxConcurrentCheckpoints(),
        snapshotSettings.getExternalizedCheckpointSettings(),
        triggerVertices,
        ackVertices,
        confirmVertices,
        checkpointIdCounter,
        completedCheckpoints,
        externalizedCheckpointsDir,
        metadataBackend,
        checkpointStatsTracker);
}
return executionGraph;
}
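The initialization loop above calls JobVertex#initializeOnMaster(ClassLoader) on every vertex before the graph is attached; output formats use this hook to create their output directories. The following is a hypothetical vertex, not part of Flink, sketched only to illustrate that hook under the signature invoked above.

import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.flink.runtime.jobgraph.JobVertex;

// Hypothetical vertex, for illustration only: creates its output directory on the
// JobManager while ExecutionGraphBuilder#buildGraph runs the master-initialization loop.
// A real vertex would also need an invokable class set, which the loop checks first.
class DirectoryCreatingVertex extends JobVertex {

    private final String outputPath;

    DirectoryCreatingVertex(String name, String outputPath) {
        super(name);
        this.outputPath = outputPath;
    }

    @Override
    public void initializeOnMaster(ClassLoader loader) throws Exception {
        // any failure here surfaces as the JobExecutionException thrown in the loop above
        Files.createDirectories(Paths.get(outputPath));
    }
}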
Use of org.apache.flink.util.DynamicCodeLoadingException in project flink by apache.
The class StreamOperatorContextBuilder, method build.
StreamOperatorStateContext build(Logger logger) throws IOException {
final Environment environment = new SavepointEnvironment.Builder(ctx, maxParallelism)
        .setConfiguration(configuration)
        .setSubtaskIndex(split.getSplitNumber())
        .setPrioritizedOperatorSubtaskState(split.getPrioritizedOperatorSubtaskState())
        .build();
StateBackend stateBackend;
try {
stateBackend = StateBackendLoader.fromApplicationOrConfigOrDefault(applicationStateBackend, TernaryBoolean.FALSE, configuration, ctx.getUserCodeClassLoader(), logger);
} catch (DynamicCodeLoadingException e) {
throw new IOException("Failed to load state backend", e);
}
StreamTaskStateInitializer initializer = new StreamTaskStateInitializerImpl(environment, stateBackend);
try {
return initializer.streamOperatorStateContext(
        operatorState.getOperatorID(),
        operatorState.getOperatorID().toString(),
        new NeverFireProcessingTimeService(),
        keyContext,
        keySerializer,
        registry,
        ctx.getMetricGroup(),
        1.0,
        false);
} catch (Exception e) {
throw new IOException("Failed to restore state backend", e);
}
}
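For context, here is a hedged sketch of the same backend resolution performed outside the builder: with no application-level backend, StateBackendLoader falls back to the configuration or the default, and DynamicCodeLoadingException is wrapped into an IOException exactly as in the method above. The wrapper class and method names are illustrative.

import java.io.IOException;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.StateBackendLoader;
import org.apache.flink.util.DynamicCodeLoadingException;
import org.apache.flink.util.TernaryBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class BackendResolutionSketch {
    private static final Logger LOG = LoggerFactory.getLogger(BackendResolutionSketch.class);

    static StateBackend resolve(Configuration configuration, ClassLoader userClassLoader) throws IOException {
        try {
            // null application backend and FALSE changelog flag: fall back to configuration or default
            return StateBackendLoader.fromApplicationOrConfigOrDefault(
                    null, TernaryBoolean.FALSE, configuration, userClassLoader, LOG);
        } catch (DynamicCodeLoadingException e) {
            throw new IOException("Failed to load state backend", e);
        }
    }
}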
Use of org.apache.flink.util.DynamicCodeLoadingException in project flink by apache.
The class StateBackendLoader, method loadStateBackendFromConfig.
// ------------------------------------------------------------------------
// Loading the state backend from a configuration
// ------------------------------------------------------------------------
/**
* Loads the unwrapped state backend from the configuration, from the parameter 'state.backend',
* as defined in {@link StateBackendOptions#STATE_BACKEND}.
*
* <p>The state backends can be specified either via their shortcut name, or via the class name
* of a {@link StateBackendFactory}. If a StateBackendFactory class name is specified, the
* factory is instantiated (via its zero-argument constructor) and its {@link
* StateBackendFactory#createFromConfig(ReadableConfig, ClassLoader)} method is called.
*
* <p>Recognized shortcut names are '{@value StateBackendLoader#HASHMAP_STATE_BACKEND_NAME}',
* '{@value StateBackendLoader#ROCKSDB_STATE_BACKEND_NAME}' '{@value
* StateBackendLoader#MEMORY_STATE_BACKEND_NAME}' (Deprecated), and '{@value
* StateBackendLoader#FS_STATE_BACKEND_NAME}' (Deprecated).
*
* @param config The configuration to load the state backend from
* @param classLoader The class loader that should be used to load the state backend
* @param logger Optionally, a logger to log actions to (may be null)
* @return The instantiated state backend.
* @throws DynamicCodeLoadingException Thrown if a state backend factory is configured and the
* factory class was not found or the factory could not be instantiated
* @throws IllegalConfigurationException May be thrown by the StateBackendFactory when creating
* / configuring the state backend in the factory
* @throws IOException May be thrown by the StateBackendFactory when instantiating the state
* backend
*/
public static StateBackend loadStateBackendFromConfig(
        ReadableConfig config, ClassLoader classLoader, @Nullable Logger logger)
        throws IllegalConfigurationException, DynamicCodeLoadingException, IOException {
checkNotNull(config, "config");
checkNotNull(classLoader, "classLoader");
final String backendName = config.get(StateBackendOptions.STATE_BACKEND);
if (backendName == null) {
return null;
}
// by default the factory class is the backend name
String factoryClassName = backendName;
switch(backendName.toLowerCase()) {
case MEMORY_STATE_BACKEND_NAME:
MemoryStateBackend backend = new MemoryStateBackendFactory().createFromConfig(config, classLoader);
if (logger != null) {
logger.warn("MemoryStateBackend has been deprecated. Please use 'hashmap' state " + "backend instead with JobManagerCheckpointStorage for equivalent " + "functionality");
logger.info("State backend is set to job manager {}", backend);
}
return backend;
case FS_STATE_BACKEND_NAME:
if (logger != null) {
logger.warn("{} state backend has been deprecated. Please use 'hashmap' state " + "backend instead.", backendName.toLowerCase());
}
// utilizes the same HeapKeyedStateBackend runtime implementation.
case HASHMAP_STATE_BACKEND_NAME:
HashMapStateBackend hashMapStateBackend = new HashMapStateBackendFactory().createFromConfig(config, classLoader);
if (logger != null) {
logger.info("State backend is set to heap memory {}", hashMapStateBackend);
}
return hashMapStateBackend;
case ROCKSDB_STATE_BACKEND_NAME:
factoryClassName = ROCKSDB_STATE_BACKEND_FACTORY;
// fall through to the 'default' case, which loads the factory class via reflection
default:
if (logger != null) {
logger.info("Loading state backend via factory {}", factoryClassName);
}
StateBackendFactory<?> factory;
try {
@SuppressWarnings("rawtypes") Class<? extends StateBackendFactory> clazz = Class.forName(factoryClassName, false, classLoader).asSubclass(StateBackendFactory.class);
factory = clazz.newInstance();
} catch (ClassNotFoundException e) {
throw new DynamicCodeLoadingException("Cannot find configured state backend factory class: " + backendName, e);
} catch (ClassCastException | InstantiationException | IllegalAccessException e) {
throw new DynamicCodeLoadingException("The class configured under '" + StateBackendOptions.STATE_BACKEND.key() + "' is not a valid state backend factory (" + backendName + ')', e);
}
return factory.createFromConfig(config, classLoader);
}
}
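A hedged sketch of the two configuration styles described in the Javadoc above: a recognized shortcut name, or the fully qualified name of a StateBackendFactory that the 'default' branch loads via reflection. The custom factory class name shown in the comment is a hypothetical placeholder.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.StateBackendOptions;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.StateBackendLoader;

class StateBackendConfigSketch {
    static StateBackend loadConfiguredBackend(ClassLoader classLoader) throws Exception {
        Configuration config = new Configuration();
        // 1) shortcut name: handled directly by the switch above
        config.set(StateBackendOptions.STATE_BACKEND, "hashmap");
        // 2) factory class name (hypothetical): would take the reflective 'default' branch instead
        // config.set(StateBackendOptions.STATE_BACKEND, "com.example.MyStateBackendFactory");
        return StateBackendLoader.loadStateBackendFromConfig(config, classLoader, null);
    }
}

Either value is resolved by the same loadStateBackendFromConfig method shown above; only the factory-class form exercises the reflective loading that can raise DynamicCodeLoadingException.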
Use of org.apache.flink.util.DynamicCodeLoadingException in project flink by apache.
The class StateBackendLoader, method loadChangelogStateBackend.
private static StateBackend loadChangelogStateBackend(StateBackend backend, ClassLoader classLoader) throws DynamicCodeLoadingException {
// ChangelogStateBackend resides in a separate module, load it using reflection
try {
Constructor<? extends DelegatingStateBackend> constructor = Class.forName(CHANGELOG_STATE_BACKEND, false, classLoader).asSubclass(DelegatingStateBackend.class).getDeclaredConstructor(StateBackend.class);
constructor.setAccessible(true);
return constructor.newInstance(backend);
} catch (ClassNotFoundException e) {
throw new DynamicCodeLoadingException("Cannot find DelegateStateBackend class: " + CHANGELOG_STATE_BACKEND, e);
} catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
throw new DynamicCodeLoadingException("Fail to initialize: " + CHANGELOG_STATE_BACKEND, e);
}
}
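Since the helper above is private, the following is only a generic restatement of its reflective pattern with hypothetical names: load a class by name, look up the single-argument constructor, make it accessible, and wrap an existing delegate. It is a sketch, not the Flink implementation.

import java.lang.reflect.Constructor;
import org.apache.flink.util.DynamicCodeLoadingException;

class ReflectiveWrapSketch {
    // Hypothetical generic helper mirroring loadChangelogStateBackend: superType is the
    // expected supertype of the loaded class, ctorArgType the constructor parameter type.
    static <T, A> T wrapViaReflection(
            String className, Class<T> superType, Class<A> ctorArgType, A delegate, ClassLoader cl)
            throws DynamicCodeLoadingException {
        try {
            Constructor<? extends T> ctor = Class.forName(className, false, cl)
                    .asSubclass(superType)
                    .getDeclaredConstructor(ctorArgType);
            ctor.setAccessible(true);
            return ctor.newInstance(delegate);
        } catch (ReflectiveOperationException e) {
            throw new DynamicCodeLoadingException("Cannot load or instantiate " + className, e);
        }
    }
}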