
Example 11 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project robovm by robovm.

the class SecureRandomTest method testSecureRandomThreadSafety.

public void testSecureRandomThreadSafety() throws Exception {
    final SecureRandom secureRandom = SecureRandom.getInstance(algorithmName);
    int threads = 2;
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ExecutorCompletionService ecs = new ExecutorCompletionService(executor);
    for (int t = 0; t < threads; t++) {
        ecs.submit(new Callable<Void>() {

            public Void call() {
                for (int i = 0; i < 1000; i++) {
                    secureRandom.generateSeed(1024);
                }
                return null;
            }
        });
    }
    executor.shutdown();
    for (int i = 0; i < threads; i++) {
        ecs.take().get();
    }
}
Also used : ExecutorService(java.util.concurrent.ExecutorService) SecureRandom(java.security.SecureRandom) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService)
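
The point of ExecutorCompletionService in this test is that ecs.take().get() blocks until one worker finishes and rethrows, wrapped in an ExecutionException, any exception raised on that worker thread, so a thread-safety failure inside call() actually fails the test instead of being lost. A related variant is to bound the wait with poll(timeout, unit) so a hung worker fails the test rather than blocking it forever; the sketch below shows that pattern (the class name, the empty worker body and the 30-second timeout are illustrative assumptions, not part of the original test).

import java.util.concurrent.*;

public class BoundedWaitExample {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(2);
        CompletionService<Void> ecs = new ExecutorCompletionService<Void>(executor);
        for (int t = 0; t < 2; t++) {
            ecs.submit(new Callable<Void>() {
                public Void call() {
                    // ... the concurrent work under test goes here ...
                    return null;
                }
            });
        }
        executor.shutdown();
        for (int i = 0; i < 2; i++) {
            // poll() returns null on timeout instead of blocking indefinitely like take()
            Future<Void> done = ecs.poll(30, TimeUnit.SECONDS);
            if (done == null) {
                throw new TimeoutException("worker did not finish within 30 seconds");
            }
            // get() rethrows any exception thrown on the worker thread
            done.get();
        }
    }
}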

Example 12 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project otter by alibaba.

the class ExecutorTemplate method start.

public void start() {
    completionService = new ExecutorCompletionService(executor);
    futures = Collections.synchronizedList(new ArrayList<Future>());
}
Also used : ArrayList(java.util.ArrayList) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService)
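
Only start() is shown here: it pairs an ExecutorCompletionService with a synchronized list of futures, so tasks can be submitted from several threads and later drained in completion order. The otter ExecutorTemplate's submit/wait methods are not part of this snippet; below is a self-contained sketch of how such a helper could be built on the same two fields (the class name and method names are assumptions for illustration, not the otter API).

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.*;

public class SimpleExecutorTemplate {
    private final ExecutorService executor = Executors.newFixedThreadPool(4);
    private CompletionService<Object> completionService;
    private List<Future<Object>> futures;

    public void start() {
        completionService = new ExecutorCompletionService<Object>(executor);
        futures = Collections.synchronizedList(new ArrayList<Future<Object>>());
    }

    public void submit(Runnable task) {
        // submit(Runnable, result) lets plain Runnables flow through the completion service
        futures.add(completionService.submit(task, null));
    }

    public void waitForResult() throws InterruptedException, ExecutionException {
        // take() returns futures in completion order; get() rethrows the first task failure
        for (int i = 0; i < futures.size(); i++) {
            completionService.take().get();
        }
    }
}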

Example 13 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project otter by alibaba.

the class ArchiveBean method doPack.

/**
     * Perform the compression (pack the files into the archive).
     */
@SuppressWarnings("resource")
private boolean doPack(final File targetArchiveFile, List<FileData> fileDatas, final ArchiveRetriverCallback<FileData> callback) {
    // First check whether the target archive file already exists; if it does, delete it
    if (true == targetArchiveFile.exists() && false == NioUtils.delete(targetArchiveFile, 3)) {
        throw new ArchiveException(String.format("[%s] exist and delete failed", targetArchiveFile.getAbsolutePath()));
    }
    boolean exist = false;
    ZipOutputStream zipOut = null;
    Set<String> entryNames = new HashSet<String>();
    // Queue of tasks whose downloads completed successfully
    BlockingQueue<Future<ArchiveEntry>> queue = new LinkedBlockingQueue<Future<ArchiveEntry>>();
    ExecutorCompletionService completionService = new ExecutorCompletionService(executor, queue);
    final File targetDir = new File(targetArchiveFile.getParentFile(), FilenameUtils.getBaseName(targetArchiveFile.getPath()));
    try {
        // Create a temporary directory
        FileUtils.forceMkdir(targetDir);
        zipOut = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(targetArchiveFile)));
        zipOut.setLevel(Deflater.BEST_SPEED);
        // Compress the entries concurrently
        for (final FileData fileData : fileDatas) {
            if (fileData.getEventType().isDelete()) {
                // Skip packing delete-type data; the delete only needs to be applied directly on the target
                continue;
            }
            String namespace = fileData.getNameSpace();
            String path = fileData.getPath();
            boolean isLocal = StringUtils.isBlank(namespace);
            String entryName = null;
            if (true == isLocal) {
                entryName = FilenameUtils.getPath(path) + FilenameUtils.getName(path);
            } else {
                entryName = namespace + File.separator + path;
            }
            // Filter out duplicate file data entries
            if (entryNames.contains(entryName) == false) {
                entryNames.add(entryName);
            } else {
                continue;
            }
            final String name = entryName;
            if (true == isLocal && !useLocalFileMutliThread) {
                // Handle serially, without going through a temporary file
                queue.add(new DummyFuture(new ArchiveEntry(name, callback.retrive(fileData))));
            } else {
                completionService.submit(new Callable<ArchiveEntry>() {

                    public ArchiveEntry call() throws Exception {
                        // Handle exceptions here; the retrieval may fail
                        InputStream input = null;
                        OutputStream output = null;
                        try {
                            input = callback.retrive(fileData);
                            if (input instanceof LazyFileInputStream) {
                                // Get the underlying stream
                                input = ((LazyFileInputStream) input).getInputSteam();
                            }
                            if (input != null) {
                                File tmp = new File(targetDir, name);
                                // Try to create the parent directories
                                NioUtils.create(tmp.getParentFile(), false, 3);
                                output = new FileOutputStream(tmp);
                                // Copy to the temporary file
                                NioUtils.copy(input, output);
                                return new ArchiveEntry(name, new File(targetDir, name));
                            } else {
                                return new ArchiveEntry(name);
                            }
                        } finally {
                            IOUtils.closeQuietly(input);
                            IOUtils.closeQuietly(output);
                        }
                    }
                });
            }
        }
        for (int i = 0; i < entryNames.size(); i++) {
            // Read in the stream
            ArchiveEntry input = null;
            InputStream stream = null;
            try {
                input = queue.take().get();
                if (input == null) {
                    continue;
                }
                stream = input.getStream();
                if (stream == null) {
                    continue;
                }
                if (stream instanceof LazyFileInputStream) {
                    // Get the underlying stream
                    stream = ((LazyFileInputStream) stream).getInputSteam();
                }
                exist = true;
                zipOut.putNextEntry(new ZipEntry(input.getName()));
                // Write it into the zip output stream
                NioUtils.copy(stream, zipOut);
                zipOut.closeEntry();
            } finally {
                IOUtils.closeQuietly(stream);
            }
        }
        if (exist) {
            zipOut.finish();
        }
    } catch (Exception e) {
        throw new ArchiveException(e);
    } finally {
        IOUtils.closeQuietly(zipOut);
        try {
            // Delete the temporary directory
            FileUtils.deleteDirectory(targetDir);
        } catch (IOException e) {
        // ignore
        }
    }
    return exist;
}
Also used : FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) BufferedOutputStream(java.io.BufferedOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) ZipOutputStream(de.schlichtherle.util.zip.ZipOutputStream) ZipEntry(de.schlichtherle.util.zip.ZipEntry) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) IOException(java.io.IOException) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) TimeoutException(java.util.concurrent.TimeoutException) FileNotFoundException(java.io.FileNotFoundException) ExecutionException(java.util.concurrent.ExecutionException) Future(java.util.concurrent.Future) ZipFile(de.schlichtherle.util.zip.ZipFile) File(java.io.File) FileData(com.alibaba.otter.shared.etl.model.FileData) HashSet(java.util.HashSet)
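
Two details carry this example. The completion service is built over an explicit LinkedBlockingQueue, and serially processed entries are wrapped in a DummyFuture and added straight to that queue, so the consumer loop at the bottom reads every result through the same queue.take().get() path whether it was produced in the calling thread or in the pool. DummyFuture itself is not shown above; below is a minimal sketch of what such a pre-completed Future can look like (the class name and details are assumptions for illustration, not the otter implementation).

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

// A Future that is already done at construction time, so hand-computed results
// can sit in the same BlockingQueue<Future<...>> as results from pooled tasks.
public class CompletedFuture<V> implements Future<V> {
    private final V value;

    public CompletedFuture(V value) {
        this.value = value;
    }

    public boolean cancel(boolean mayInterruptIfRunning) { return false; }
    public boolean isCancelled() { return false; }
    public boolean isDone() { return true; }
    public V get() { return value; }
    public V get(long timeout, TimeUnit unit) { return value; }
}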

Example 14 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project otter by alibaba.

the class DatabaseExtractor method extract.

@Override
public void extract(DbBatch dbBatch) throws ExtractException {
    Assert.notNull(dbBatch);
    Assert.notNull(dbBatch.getRowBatch());
    // Read the configuration
    Pipeline pipeline = getPipeline(dbBatch.getRowBatch().getIdentity().getPipelineId());
    boolean mustDb = pipeline.getParameters().getSyncConsistency().isMedia();
    // If these are row records, a database lookback query is mandatory
    boolean isRow = pipeline.getParameters().getSyncMode().isRow();
    // Read the configuration once
    // Adjust the thread pool; Extractor instances are pooled
    adjustPoolSize(pipeline.getParameters().getExtractPoolSize());
    ExecutorCompletionService completionService = new ExecutorCompletionService(executor);
    // Submit concurrently
    ExtractException exception = null;
    // Process each table
    List<DataItem> items = new ArrayList<DataItem>();
    List<Future> futures = new ArrayList<Future>();
    List<EventData> eventDatas = dbBatch.getRowBatch().getDatas();
    for (EventData eventData : eventDatas) {
        if (eventData.getEventType().isDdl()) {
            continue;
        }
        DataItem item = new DataItem(eventData);
        // In row mode, check whether the record already contains all of the row's columns; if any are missing, run a database query
        boolean flag = mustDb || (eventData.getSyncConsistency() != null && eventData.getSyncConsistency().isMedia());
        // Extra case: with Oracle, erosa sometimes produces records that contain only the primary key and no changed columns, so a lookback query is needed
        if (!flag && CollectionUtils.isEmpty(eventData.getUpdatedColumns())) {
            DataMedia dataMedia = ConfigHelper.findDataMedia(pipeline, eventData.getTableId());
            if (dataMedia.getSource().getType().isOracle()) {
                flag |= true;
                // Treat this kind of data as a remedy operation as well; the record may not even exist when erosa re-queries the database
                eventData.setRemedy(true);
            }
        }
        if (isRow && !flag) {
            // Check once up front to avoid contention on the multi-threaded path
            // For views there will be another check later
            flag = checkNeedDbForRowMode(pipeline, eventData);
        }
        if (flag && (eventData.getEventType().isInsert() || eventData.getEventType().isUpdate())) {
            // Decide whether a lookback query is needed
            // Submit for parallel querying
            Future future = completionService.submit(new DatabaseExtractWorker(pipeline, item), null);
            if (future.isDone()) {
                // Check immediately: with CallerRunsPolicy the task may have run in the current thread, so fail fast on exceptions instead of waiting for every task to finish
                try {
                    future.get();
                } catch (InterruptedException e) {
                    // Cancel the remaining tasks and exit immediately
                    cancel(futures);
                    throw new ExtractException(e);
                } catch (ExecutionException e) {
                    // Cancel the remaining tasks and exit immediately
                    cancel(futures);
                    throw new ExtractException(e);
                }
            }
            // Record the submitted task
            futures.add(future);
        }
        // Add in the original order
        items.add(item);
    }
    // Start processing the results
    int index = 0;
    while (index < futures.size()) {
        // Loop over all submitted tasks
        try {
            // This may also be interrupted
            Future future = completionService.take();
            future.get();
        } catch (InterruptedException e) {
            exception = new ExtractException(e);
            // If any future fails with an exception, exit
            break;
        } catch (ExecutionException e) {
            exception = new ExtractException(e);
            // If any future fails with an exception, exit
            break;
        }
        index++;
    }
    if (index < futures.size()) {
        // index < size means a task failed: cancel the unfinished tasks; completed results are still collected so duplicate entries can be filtered
        cancel(futures);
        throw exception;
    } else {
        // All-success branch; build the result while preserving the original order
        for (int i = 0; i < items.size(); i++) {
            DataItem item = items.get(i);
            if (item.filter) {
                // Skip data marked to be filtered, e.g. records that no longer exist on database lookback
                eventDatas.remove(item.getEventData());
            }
        }
    }
}
Also used : ExtractException(com.alibaba.otter.node.etl.extract.exceptions.ExtractException) ArrayList(java.util.ArrayList) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) EventData(com.alibaba.otter.shared.etl.model.EventData) Pipeline(com.alibaba.otter.shared.common.model.config.pipeline.Pipeline) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) DataMedia(com.alibaba.otter.shared.common.model.config.data.DataMedia)
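
The fast-fail trick in this extractor is easy to miss: because the pool apparently uses CallerRunsPolicy (the comment in the code mentions CallerRun), a submitted task may execute in the submitting thread and already be done right after submit(), so checking future.isDone() and calling get() immediately surfaces a failure without waiting for the whole batch. A compact, self-contained sketch of that pattern with an explicitly configured pool (pool sizes, queue capacity and the empty worker body are illustrative assumptions):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

public class FastFailDemo {
    public static void main(String[] args) throws Exception {
        // Small pool with a tiny queue; overflow tasks run in the caller thread
        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 0L, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(1), new ThreadPoolExecutor.CallerRunsPolicy());
        CompletionService<Void> completionService = new ExecutorCompletionService<Void>(executor);
        List<Future<Void>> futures = new ArrayList<Future<Void>>();
        try {
            for (int i = 0; i < 10; i++) {
                Future<Void> future = completionService.submit(new Callable<Void>() {
                    public Void call() {
                        // ... per-record extraction work goes here ...
                        return null;
                    }
                });
                // If the task ran in this thread it is already done; check it now
                if (future.isDone()) {
                    future.get();
                }
                futures.add(future);
            }
            // Drain the remaining results in completion order
            for (int i = 0; i < futures.size(); i++) {
                completionService.take().get();
            }
        } finally {
            executor.shutdown();
        }
    }
}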

Example 15 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project otter by alibaba.

the class FileLoadAction method moveFiles.

/**
     * Load files with multiple threads, using a fast-fail strategy.
     */
private void moveFiles(FileLoadContext context, List<FileData> fileDatas, File rootDir) {
    Exception exception = null;
    adjustPoolSize(context);
    ExecutorCompletionService<Exception> executorComplition = new ExecutorCompletionService<Exception>(executor);
    List<Future<Exception>> results = new ArrayList<Future<Exception>>();
    for (FileData fileData : fileDatas) {
        Future<Exception> future = executorComplition.submit(new FileLoadWorker(context, rootDir, fileData));
        results.add(future);
        // fast fail
        if (future.isDone()) {
            // If the task ran in the calling thread (the pool uses CallerRunsPolicy), check it right away
            try {
                exception = future.get();
            } catch (Exception e) {
                exception = e;
            }
            if (exception != null) {
                for (Future<Exception> result : results) {
                    if (!result.isDone() && !result.isCancelled()) {
                        result.cancel(true);
                    }
                }
                throw exception instanceof LoadException ? (LoadException) exception : new LoadException(exception);
            }
        }
    }
    int resultSize = results.size();
    int cursor = 0;
    while (cursor < resultSize) {
        try {
            Future<Exception> result = executorComplition.take();
            exception = result.get();
        } catch (Exception e) {
            exception = e;
            break;
        }
        cursor++;
    }
    if (cursor != resultSize) {
        // A task failed; immediately cancel the tasks that are still running
        for (Future<Exception> future : results) {
            if (!future.isDone() && !future.isCancelled()) {
                future.cancel(true);
            }
        }
    }
    if (exception != null) {
        throw exception instanceof LoadException ? (LoadException) exception : new LoadException(exception);
    }
}
Also used : ArrayList(java.util.ArrayList) Future(java.util.concurrent.Future) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) FileData(com.alibaba.otter.shared.etl.model.FileData) LoadException(com.alibaba.otter.node.etl.load.exception.LoadException) IOException(java.io.IOException)

Aggregations

ExecutorCompletionService (java.util.concurrent.ExecutorCompletionService): 58
ExecutionException (java.util.concurrent.ExecutionException): 27
ExecutorService (java.util.concurrent.ExecutorService): 27
ArrayList (java.util.ArrayList): 26
IOException (java.io.IOException): 23
Future (java.util.concurrent.Future): 18
Test (org.junit.Test): 12
InterruptedIOException (java.io.InterruptedIOException): 9
List (java.util.List): 8
Path (org.apache.hadoop.fs.Path): 8
ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor): 6
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 6
Callable (java.util.concurrent.Callable): 5
HashMap (java.util.HashMap): 4
TimeoutException (java.util.concurrent.TimeoutException): 4
File (java.io.File): 3
Random (java.util.Random): 3
FileData (com.alibaba.otter.shared.etl.model.FileData): 2
UnknownHostException (java.net.UnknownHostException): 2
Path (java.nio.file.Path): 2