Use of java.util.concurrent.RejectedExecutionException in project hbase by apache.
From class TestPartitionedMobCompactor, method createThreadPool:
private static ExecutorService createThreadPool() {
  int maxThreads = 10;
  long keepAliveTime = 60;
  final SynchronousQueue<Runnable> queue = new SynchronousQueue<>();
  ThreadPoolExecutor pool = new ThreadPoolExecutor(1, maxThreads, keepAliveTime, TimeUnit.SECONDS,
      queue, Threads.newDaemonThreadFactory("MobFileCompactionChore"),
      new RejectedExecutionHandler() {
        @Override
        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
          try {
            // waiting for a thread to pick up instead of throwing exceptions.
            queue.put(r);
          } catch (InterruptedException e) {
            throw new RejectedExecutionException(e);
          }
        }
      });
  ((ThreadPoolExecutor) pool).allowCoreThreadTimeOut(true);
  return pool;
}
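The rejection handler above turns pool saturation into back-pressure: once all ten threads are busy, the SynchronousQueue has no idle consumer, execute() rejects the task, and the handler parks the submitting thread in queue.put(r) until a worker frees up. Below is a minimal, self-contained sketch of the same pattern; the class name DemoBlockingPool and the pool sizes are illustrative assumptions, not part of HBase. Running it shows that over-submitting blocks the caller instead of throwing.

import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class DemoBlockingPool {
  public static void main(String[] args) throws InterruptedException {
    final SynchronousQueue<Runnable> queue = new SynchronousQueue<>();
    ThreadPoolExecutor pool = new ThreadPoolExecutor(1, 2, 60, TimeUnit.SECONDS, queue,
        (r, executor) -> {
          try {
            // Same idea as the HBase handler: park the submitter until a worker can take the task.
            queue.put(r);
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RejectedExecutionException(e);
          }
        });
    // Five tasks against a two-thread pool: submissions 3..5 block in the handler instead of failing.
    for (int i = 0; i < 5; i++) {
      final int id = i;
      pool.execute(() -> {
        try {
          Thread.sleep(200);
        } catch (InterruptedException ignored) {
        }
        System.out.println("task " + id + " ran on " + Thread.currentThread().getName());
      });
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
  }
}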
Use of java.util.concurrent.RejectedExecutionException in project hive by apache.
From class SQLOperation, method runInternal:
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.PENDING);
  boolean runAsync = shouldRunAsync();
  final boolean asyncPrepare = runAsync
      && HiveConf.getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_ASYNC_COMPILE);
  if (!asyncPrepare) {
    prepare(queryState);
  }
  if (!runAsync) {
    runQuery();
  } else {
    // We'll pass ThreadLocals in the background thread from the foreground (handler) thread.
    // 1) ThreadLocal Hive object needs to be set in background thread
    // 2) The metastore client in Hive is associated with right user.
    // 3) Current UGI will get used by metastore when metastore is in embedded mode
    Runnable work = new BackgroundWork(getCurrentUGI(), parentSession.getSessionHive(),
        SessionState.getPerfLogger(), SessionState.get(), asyncPrepare);
    try {
      // This submit blocks if no background threads are available to run this operation
      Future<?> backgroundHandle = getParentSession().submitBackgroundOperation(work);
      setBackgroundHandle(backgroundHandle);
    } catch (RejectedExecutionException rejected) {
      setState(OperationState.ERROR);
      throw new HiveSQLException("The background threadpool cannot accept"
          + " new task for execution, please retry the operation", rejected);
    }
  }
}
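Here submitBackgroundOperation() hands the work to HiveServer2's bounded async-execution pool; when that pool and its queue are full, the submit throws RejectedExecutionException, which the catch block converts into a retryable HiveSQLException. The following is a minimal sketch of how such a rejection arises from a bounded pool; the pool sizes and the class name BackgroundPoolDemo are assumptions for illustration, not HiveServer2's actual configuration.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BackgroundPoolDemo {
  public static void main(String[] args) {
    // One worker thread, queue capacity one, default AbortPolicy on overflow.
    ThreadPoolExecutor backgroundPool =
        new ThreadPoolExecutor(1, 1, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1));
    Runnable work = () -> {
      try {
        Thread.sleep(1000);  // stand-in for query compilation and execution
      } catch (InterruptedException ignored) {
      }
    };
    try {
      backgroundPool.submit(work);  // occupies the single worker
      backgroundPool.submit(work);  // waits in the queue
      backgroundPool.submit(work);  // no capacity left -> RejectedExecutionException
    } catch (RejectedExecutionException rejected) {
      // SQLOperation.runInternal() above maps this to OperationState.ERROR and a
      // HiveSQLException telling the client to retry the operation.
      System.out.println("background pool is full: " + rejected);
    } finally {
      backgroundPool.shutdownNow();
    }
  }
}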
Use of java.util.concurrent.RejectedExecutionException in project storm by apache.
From class Localizer, method updateBlobs:
/**
 * This function updates blobs on the supervisor. It uses a separate thread pool and runs
 * asynchronously from the download and delete operations.
 */
public List<LocalizedResource> updateBlobs(List<LocalResource> localResources, String user)
    throws AuthorizationException, KeyNotFoundException, IOException {
  LocalizedResourceSet lrsrcSet = _userRsrc.get(user);
  ArrayList<LocalizedResource> results = new ArrayList<>();
  ArrayList<Callable<LocalizedResource>> updates = new ArrayList<>();
  if (lrsrcSet == null) {
    // resource set must have been removed
    return results;
  }
  ClientBlobStore blobstore = null;
  try {
    blobstore = getClientBlobStore();
    for (LocalResource localResource : localResources) {
      String key = localResource.getBlobName();
      LocalizedResource lrsrc = lrsrcSet.get(key, localResource.shouldUncompress());
      if (lrsrc == null) {
        LOG.warn("blob requested for update doesn't exist: {}", key);
        continue;
      } else {
        // update it if either the version isn't the latest or if any local blob files are missing
        if (!isLocalizedResourceUpToDate(lrsrc, blobstore) || !isLocalizedResourceDownloaded(lrsrc)) {
          LOG.debug("updating blob: {}", key);
          updates.add(new DownloadBlob(this, _conf, key, new File(lrsrc.getFilePath()), user,
              lrsrc.isUncompressed(), true));
        }
      }
    }
  } finally {
    if (blobstore != null) {
      blobstore.shutdown();
    }
  }
  try {
    List<Future<LocalizedResource>> futures = _updateExecService.invokeAll(updates);
    for (Future<LocalizedResource> futureRsrc : futures) {
      try {
        LocalizedResource lrsrc = futureRsrc.get();
        // put the resource just in case it was removed at same time by the cleaner
        LocalizedResourceSet newSet = new LocalizedResourceSet(user);
        LocalizedResourceSet newlrsrcSet = _userRsrc.putIfAbsent(user, newSet);
        if (newlrsrcSet == null) {
          newlrsrcSet = newSet;
        }
        newlrsrcSet.putIfAbsent(lrsrc.getKey(), lrsrc, lrsrc.isUncompressed());
        results.add(lrsrc);
      } catch (ExecutionException e) {
        LOG.error("Error updating blob: ", e);
        if (e.getCause() instanceof AuthorizationException) {
          throw (AuthorizationException) e.getCause();
        }
        if (e.getCause() instanceof KeyNotFoundException) {
          throw (KeyNotFoundException) e.getCause();
        }
      }
    }
  } catch (RejectedExecutionException re) {
    LOG.error("Error updating blobs : ", re);
  } catch (InterruptedException ie) {
    throw new IOException("Interrupted Exception", ie);
  }
  return results;
}
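The outer catch handles the case where invokeAll() itself is rejected, for example because _updateExecService has already been shut down while the supervisor is stopping; the method then only logs and returns whatever results were collected. A small stand-alone sketch of that failure mode follows; the class name and tasks are hypothetical, not Storm code.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;

public class InvokeAllRejectionDemo {
  public static void main(String[] args) throws InterruptedException {
    ExecutorService updateExecService = Executors.newFixedThreadPool(2);
    List<Callable<String>> updates = Arrays.asList(() -> "blob-1", () -> "blob-2");
    updateExecService.shutdown();  // e.g. the supervisor is being stopped
    try {
      // A shut-down executor rejects every task that invokeAll tries to submit.
      updateExecService.invokeAll(updates);
    } catch (RejectedExecutionException re) {
      // Mirrors the catch block above: log the failure and carry on with whatever
      // results were gathered, instead of propagating the error.
      System.out.println("executor no longer accepts update tasks: " + re);
    }
  }
}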
Use of java.util.concurrent.RejectedExecutionException in project Talon-for-Twitter by klinker24.
From class ImageUtils, method loadCircleImage:
public static void loadCircleImage(Context context, final ImageView iv, String url, BitmapLruCache mCache) {
  BitmapDrawable wrapper = null;
  if (url != null) {
    wrapper = mCache.getFromMemoryCache(url);
  }
  if (null != wrapper && iv.getVisibility() != View.GONE) {
    // The cache has it, so just display it
    Log.v("talon_image_cache", "got image from cache");
    iv.setImageDrawable(wrapper);
    Animation fadeInAnimation = AnimationUtils.loadAnimation(context, R.anim.fade_in);
    iv.startAnimation(fadeInAnimation);
  } else if (url != null) {
    // Memory Cache doesn't have the URL, do threaded request...
    iv.setImageDrawable(null);
    ImageUrlCircleAsyncTask mCurrentTask = new ImageUrlCircleAsyncTask(context, iv, mCache, false);
    try {
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        SDK11.executeOnThreadPool(mCurrentTask, url);
      } else {
        mCurrentTask.execute(url);
      }
    } catch (RejectedExecutionException e) {
      // This shouldn't happen, but might.
    }
  }
}
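AsyncTask's shared thread pool is bounded, so executeOnThreadPool() can reject a task when many avatar loads are queued at once; the empty catch above deliberately drops the load rather than crash the UI thread. Below is a plain-Java sketch of the same drop-on-rejection policy; the pool sizes and the class name ImageLoadRejectionDemo are illustrative assumptions, not Android's actual AsyncTask configuration.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ImageLoadRejectionDemo {
  // Illustrative sizes only; the real AsyncTask pool configuration varies by Android release.
  private static final ThreadPoolExecutor IMAGE_POOL =
      new ThreadPoolExecutor(2, 4, 30, TimeUnit.SECONDS, new LinkedBlockingQueue<>(8));

  static void loadCircleImage(String url) {
    try {
      IMAGE_POOL.execute(() -> {
        try {
          Thread.sleep(100);  // stand-in for download and circular crop
        } catch (InterruptedException ignored) {
        }
        System.out.println("decoded " + url);
      });
    } catch (RejectedExecutionException e) {
      // Same policy as the method above: dropping an avatar load is preferable to
      // crashing, so the rejection is swallowed and the view simply stays empty.
      System.out.println("dropped " + url);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    for (int i = 0; i < 20; i++) {
      loadCircleImage("https://example.invalid/avatar/" + i);
    }
    IMAGE_POOL.shutdown();
    IMAGE_POOL.awaitTermination(1, TimeUnit.MINUTES);
  }
}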
Use of java.util.concurrent.RejectedExecutionException in project hadoop by apache.
From class TestFileSystemOperationsWithThreads, method testRenameThreadPoolExecuteSingleThreadFailure:
/*
 * Test case for rename operation with multiple threads and flat listing enabled.
 */
@Test
public void testRenameThreadPoolExecuteSingleThreadFailure() throws Exception {
  // Spy azure file system object and return mocked thread pool
  NativeAzureFileSystem mockFs = Mockito.spy((NativeAzureFileSystem) fs);
  // Spy a thread pool executor and link it to azure file system object.
  String path = mockFs.pathToKey(mockFs.makeAbsolute(new Path("root")));
  AzureFileSystemThreadPoolExecutor mockThreadPoolExecutor = Mockito.spy(mockFs.getThreadPoolExecutor(
      renameThreads, "AzureBlobRenameThread", "Rename", path, NativeAzureFileSystem.AZURE_RENAME_THREADS));
  // With single iteration, we would have created 7 blobs resulting in 7 threads.
  Mockito.when(mockFs.getThreadPoolExecutor(renameThreads, "AzureBlobRenameThread", "Rename", path,
      NativeAzureFileSystem.AZURE_RENAME_THREADS)).thenReturn(mockThreadPoolExecutor);
  // Create a thread executor and link it to mocked thread pool executor object.
  ThreadPoolExecutor mockThreadExecutor = Mockito.spy(mockThreadPoolExecutor.getThreadPool(7));
  Mockito.when(mockThreadPoolExecutor.getThreadPool(7)).thenReturn(mockThreadExecutor);
  // Mock thread executor to throw exception for all but the first request.
  Mockito.doCallRealMethod().doThrow(new RejectedExecutionException())
      .when(mockThreadExecutor).execute(Mockito.any(Runnable.class));
  validateRenameFolder(mockFs, "root", "rootnew");
  // Validate from logs that threads are enabled and unused threads exist.
  String content = logs.getOutput();
  assertTrue(content.contains("Using thread pool for Rename operation with threads 7"));
  assertTrue(content.contains("6 threads not used for Rename operation on blob"));
}
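The key line is the chained stubbing on mockThreadExecutor: the first execute() call runs for real and every later call throws RejectedExecutionException, which forces the rename logic onto its single-threaded fallback; the log assertions then verify that the other six threads went unused. The following is a minimal, self-contained sketch of that doCallRealMethod().doThrow(...) chaining; the class name and the pool are hypothetical, not the Hadoop test harness.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.mockito.Mockito;

public class ExecutorRejectionStubbingDemo {
  public static void main(String[] args) {
    ThreadPoolExecutor realPool =
        new ThreadPoolExecutor(2, 2, 0, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
    ThreadPoolExecutor mockThreadExecutor = Mockito.spy(realPool);
    // First execute() call behaves normally, every later call is rejected, which is
    // what pushes code under test onto a serial fallback path.
    Mockito.doCallRealMethod().doThrow(new RejectedExecutionException())
        .when(mockThreadExecutor).execute(Mockito.any(Runnable.class));

    mockThreadExecutor.execute(() -> System.out.println("first task accepted"));
    try {
      mockThreadExecutor.execute(() -> System.out.println("this task never runs"));
    } catch (RejectedExecutionException expected) {
      System.out.println("second task rejected, as stubbed");
    }
    realPool.shutdown();
  }
}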