
Example 36 with ConnectionCanceledException

Use of ch.cyberduck.core.exception.ConnectionCanceledException in project cyberduck by iterate-ch.

The class DAVSession, method login.

@Override
public void login(final Proxy proxy, final LoginCallback prompt, final CancelCallback cancel) throws BackgroundException {
    final CredentialsProvider provider = new BasicCredentialsProvider();
    if (preferences.getBoolean("webdav.ntlm.windows.authentication.enable") && WinHttpClients.isWinAuthAvailable()) {
        provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.NTLM), new WindowsCredentialsProvider(new BasicCredentialsProvider()).getCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.NTLM)));
        provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO), new WindowsCredentialsProvider(new SystemDefaultCredentialsProvider()).getCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO)));
    } else {
        provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.NTLM), new NTCredentials(host.getCredentials().getUsername(), host.getCredentials().getPassword(), preferences.getProperty("webdav.ntlm.workstation"), preferences.getProperty("webdav.ntlm.domain")));
        provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO), new NTCredentials(host.getCredentials().getUsername(), host.getCredentials().getPassword(), preferences.getProperty("webdav.ntlm.workstation"), preferences.getProperty("webdav.ntlm.domain")));
    }
    provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.BASIC), new UsernamePasswordCredentials(host.getCredentials().getUsername(), host.getCredentials().getPassword()));
    provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.DIGEST), new UsernamePasswordCredentials(host.getCredentials().getUsername(), host.getCredentials().getPassword()));
    provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.KERBEROS), new UsernamePasswordCredentials(host.getCredentials().getUsername(), host.getCredentials().getPassword()));
    client.setCredentials(provider);
    if (preferences.getBoolean("webdav.basic.preemptive")) {
        switch(proxy.getType()) {
            case DIRECT:
            case SOCKS:
                // Enable preemptive authentication. See HttpState#setAuthenticationPreemptive
                client.enablePreemptiveAuthentication(host.getHostname(), host.getPort(), host.getPort(), Charset.forName(preferences.getProperty("http.credentials.charset")));
                break;
            default:
                client.disablePreemptiveAuthentication();
        }
    } else {
        client.disablePreemptiveAuthentication();
    }
    if (host.getCredentials().isPassed()) {
        log.warn(String.format("Skip verifying credentials with previous successful authentication event for %s", this));
        return;
    }
    try {
        final Path home = new DelegatingHomeFeature(new WorkdirHomeFeature(host), new DefaultPathHomeFeature(host)).find();
        final HttpHead head = new HttpHead(new DAVPathEncoder().encode(home));
        try {
            client.execute(head, new MicrosoftIISFeaturesResponseHandler());
        } catch (SardineException e) {
            switch(e.getStatusCode()) {
                case HttpStatus.SC_NOT_FOUND:
                    log.warn(String.format("Ignore failure %s", e));
                    break;
                case HttpStatus.SC_NOT_IMPLEMENTED:
                case HttpStatus.SC_FORBIDDEN:
                case HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE:
                case HttpStatus.SC_METHOD_NOT_ALLOWED:
                    log.warn(String.format("Failed HEAD request to %s with %s. Retry with PROPFIND.", host, e.getResponsePhrase()));
                    cancel.verify();
                    // Possibly only HEAD requests are not allowed
                    list.list(home, new DisabledListProgressListener() {

                        @Override
                        public void chunk(final Path parent, final AttributedList<Path> list) throws ListCanceledException {
                            try {
                                cancel.verify();
                            } catch (ConnectionCanceledException e) {
                                throw new ListCanceledException(list, e);
                            }
                        }
                    });
                    break;
                case HttpStatus.SC_BAD_REQUEST:
                    if (preferences.getBoolean("webdav.basic.preemptive")) {
                        log.warn(String.format("Disable preemptive authentication for %s due to failure %s", host, e.getResponsePhrase()));
                        cancel.verify();
                        client.disablePreemptiveAuthentication();
                        client.execute(head, new MicrosoftIISFeaturesResponseHandler());
                    } else {
                        throw new DAVExceptionMappingService().map(e);
                    }
                    break;
                default:
                    throw new DAVExceptionMappingService().map(e);
            }
        }
    } catch (SardineException e) {
        throw new DAVExceptionMappingService().map(e);
    } catch (IOException e) {
        throw new HttpExceptionMappingService().map(e);
    }
}
Also used : Path(ch.cyberduck.core.Path) BasicCredentialsProvider(org.apache.http.impl.client.BasicCredentialsProvider) DelegatingHomeFeature(ch.cyberduck.core.shared.DelegatingHomeFeature) DisabledListProgressListener(ch.cyberduck.core.DisabledListProgressListener) ConnectionCanceledException(ch.cyberduck.core.exception.ConnectionCanceledException) DefaultPathHomeFeature(ch.cyberduck.core.shared.DefaultPathHomeFeature) WindowsCredentialsProvider(org.apache.http.impl.auth.win.WindowsCredentialsProvider) CredentialsProvider(org.apache.http.client.CredentialsProvider) SystemDefaultCredentialsProvider(org.apache.http.impl.client.SystemDefaultCredentialsProvider) IOException(java.io.IOException) HttpHead(org.apache.http.client.methods.HttpHead) NTCredentials(org.apache.http.auth.NTCredentials) UsernamePasswordCredentials(org.apache.http.auth.UsernamePasswordCredentials) SardineException(com.github.sardine.impl.SardineException) HttpExceptionMappingService(ch.cyberduck.core.http.HttpExceptionMappingService) AttributedList(ch.cyberduck.core.AttributedList) AuthScope(org.apache.http.auth.AuthScope) WorkdirHomeFeature(ch.cyberduck.core.shared.WorkdirHomeFeature) ListCanceledException(ch.cyberduck.core.exception.ListCanceledException)
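
The cancel.verify() calls in the login method above delegate to a CancelCallback that raises ConnectionCanceledException once the user aborts the connection attempt; the chunk listener then wraps that into a ListCanceledException. The following minimal sketch shows what such a callback could look like. It is illustrative only: the package of CancelCallback (assumed here to be ch.cyberduck.core) and its verify() signature are inferred from the usage above, and FlagCancelCallback is an invented name.

import java.util.concurrent.atomic.AtomicBoolean;

import ch.cyberduck.core.CancelCallback;
import ch.cyberduck.core.exception.ConnectionCanceledException;

// Hypothetical flag-backed callback; not part of the Cyberduck code base.
public class FlagCancelCallback implements CancelCallback {

    private final AtomicBoolean canceled = new AtomicBoolean(false);

    // Called from the UI or a supervising thread when the user aborts
    public void cancel() {
        canceled.set(true);
    }

    @Override
    public void verify() throws ConnectionCanceledException {
        if (canceled.get()) {
            throw new ConnectionCanceledException();
        }
    }
}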

Example 37 with ConnectionCanceledException

Use of ch.cyberduck.core.exception.ConnectionCanceledException in project cyberduck by iterate-ch.

The class MoveWorker, method compile.

protected Map<Path, Path> compile(final Move move, final ListService list, final Path source, final Path target) throws BackgroundException {
    // Compile recursive list
    final Map<Path, Path> recursive = new LinkedHashMap<>();
    recursive.put(source, target);
    if (source.isDirectory()) {
        if (!move.isRecursive(source, target)) {
            // sort ascending by timestamp to move older versions first
            final AttributedList<Path> children = list.list(source, new WorkerListProgressListener(this, listener)).filter(new VersionsComparator(true));
            for (Path child : children) {
                if (this.isCanceled()) {
                    throw new ConnectionCanceledException();
                }
                recursive.putAll(this.compile(move, list, child, new Path(target, child.getName(), child.getType())));
            }
        }
    }
    return recursive;
}
Also used : Path(ch.cyberduck.core.Path) ConnectionCanceledException(ch.cyberduck.core.exception.ConnectionCanceledException) LinkedHashMap(java.util.LinkedHashMap)
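
The compile method above collects source to target mappings into a LinkedHashMap, so iteration order matches insertion order: a directory's own mapping is added before the mappings of the entries collected beneath it. The snippet below is a small, self-contained illustration of that ordering property using plain strings instead of ch.cyberduck.core.Path; the class name is invented.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative only: demonstrates the insertion-order guarantee compile() relies on.
public final class MoveOrderDemo {

    public static void main(final String[] args) {
        final Map<String, String> recursive = new LinkedHashMap<>();
        // The directory itself is added first, as in compile()
        recursive.put("/src", "/dst");
        // Children discovered by the recursion follow
        recursive.put("/src/a.txt", "/dst/a.txt");
        recursive.put("/src/b.txt", "/dst/b.txt");
        // Prints the directory mapping before its children
        recursive.forEach((source, target) -> System.out.printf("move %s -> %s%n", source, target));
    }
}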

Example 38 with ConnectionCanceledException

Use of ch.cyberduck.core.exception.ConnectionCanceledException in project cyberduck by iterate-ch.

The class ReadDistributionWorker, method run.

@Override
public Distribution run(final Session<?> session) throws BackgroundException {
    final DistributionConfiguration cdn = session.getFeature(DistributionConfiguration.class);
    final PathContainerService container = session.getFeature(PathContainerService.class);
    for (Path c : this.getContainers(container, files)) {
        if (this.isCanceled()) {
            throw new ConnectionCanceledException();
        }
        final Distribution distribution = cdn.read(c, method, prompt);
        if (cdn.getFeature(Index.class, distribution.getMethod()) != null) {
            // Make sure container items are cached for default root object.
            distribution.setRootDocuments(session.getFeature(ListService.class).list(container.getContainer(c), new DisabledListProgressListener()).toList());
        }
        return distribution;
    }
    return this.initialize();
}
Also used : Path(ch.cyberduck.core.Path) ListService(ch.cyberduck.core.ListService) PathContainerService(ch.cyberduck.core.PathContainerService) DisabledListProgressListener(ch.cyberduck.core.DisabledListProgressListener) ConnectionCanceledException(ch.cyberduck.core.exception.ConnectionCanceledException) Distribution(ch.cyberduck.core.cdn.Distribution) DistributionConfiguration(ch.cyberduck.core.cdn.DistributionConfiguration) Index(ch.cyberduck.core.cdn.features.Index)

Example 39 with ConnectionCanceledException

Use of ch.cyberduck.core.exception.ConnectionCanceledException in project cyberduck by iterate-ch.

The class TransferPromptFilterWorker, method run.

@Override
public Map<TransferItem, TransferStatus> run(final Session<?> session) throws BackgroundException {
    final Map<TransferItem, TransferStatus> status = new HashMap<>();
    final TransferPathFilter filter = transfer.filter(session, session, action, listener);
    if (log.isDebugEnabled()) {
        log.debug(String.format("Filter cache %s with filter %s", cache, filter));
    }
    // Unordered list
    for (Map.Entry<TransferItem, AttributedList<TransferItem>> entry : cache.asMap().entrySet()) {
        if (this.isCanceled()) {
            throw new ConnectionCanceledException();
        }
        final AttributedList<TransferItem> list = entry.getValue();
        for (TransferItem file : list) {
            if (this.isCanceled()) {
                throw new ConnectionCanceledException();
            }
            final boolean accept = filter.accept(file.remote, file.local, new TransferStatus().exists(true));
            status.put(file, filter.prepare(file.remote, file.local, new TransferStatus().exists(true), listener).reject(!accept));
        }
    }
    return status;
}
Also used : AttributedList(ch.cyberduck.core.AttributedList) HashMap(java.util.HashMap) ConnectionCanceledException(ch.cyberduck.core.exception.ConnectionCanceledException) TransferStatus(ch.cyberduck.core.transfer.TransferStatus) TransferPathFilter(ch.cyberduck.core.transfer.TransferPathFilter) TransferItem(ch.cyberduck.core.transfer.TransferItem) Map(java.util.Map)
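
Examples 37 through 39 share the same cancellation idiom: the worker polls this.isCanceled() inside its loops and aborts by throwing ConnectionCanceledException. The sketch below distills that idiom into a small, self-contained base class. It is not Cyberduck's actual Worker class; the class and method names are invented for illustration.

import java.util.List;

import ch.cyberduck.core.exception.ConnectionCanceledException;

// Illustrative sketch of the cancellation idiom used by the workers above.
public abstract class CancelableBatchWorker<T> {

    private volatile boolean canceled = false;

    public void cancel() {
        canceled = true;
    }

    public boolean isCanceled() {
        return canceled;
    }

    // Process a batch of items, bailing out between items once canceled
    protected void process(final List<T> items) throws ConnectionCanceledException {
        for (T item : items) {
            if (this.isCanceled()) {
                throw new ConnectionCanceledException();
            }
            this.handle(item);
        }
    }

    protected abstract void handle(T item);
}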

Example 40 with ConnectionCanceledException

Use of ch.cyberduck.core.exception.ConnectionCanceledException in project cyberduck by iterate-ch.

The class SDSDirectS3UploadFeature, method upload.

@Override
public Node upload(final Path file, final Local local, final BandwidthThrottle throttle, final StreamListener listener, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final ThreadPool pool = ThreadPoolFactory.get("multipart", concurrency);
    try {
        final CreateFileUploadRequest createFileUploadRequest = new CreateFileUploadRequest().directS3Upload(true).timestampModification(status.getTimestamp() != null ? new DateTime(status.getTimestamp()) : null).size(TransferStatus.UNKNOWN_LENGTH == status.getLength() ? null : status.getLength()).parentId(Long.parseLong(nodeid.getVersionId(file.getParent(), new DisabledListProgressListener()))).name(file.getName());
        final CreateFileUploadResponse createFileUploadResponse = new NodesApi(session.getClient()).createFileUploadChannel(createFileUploadRequest, StringUtils.EMPTY);
        if (log.isDebugEnabled()) {
            log.debug(String.format("upload started for %s with response %s", file, createFileUploadResponse));
        }
        final Map<Integer, TransferStatus> etags = new HashMap<>();
        final List<PresignedUrl> presignedUrls = this.retrievePresignedUrls(createFileUploadResponse, status);
        final List<Future<TransferStatus>> parts = new ArrayList<>();
        final InputStream in;
        final String random = new UUIDRandomStringService().random();
        if (SDSNodeIdProvider.isEncrypted(file)) {
            in = new SDSTripleCryptEncryptorFeature(session, nodeid).encrypt(file, local.getInputStream(), status);
        } else {
            in = local.getInputStream();
        }
        try {
            // Full size of file
            final long size = status.getLength() + status.getOffset();
            long offset = 0;
            long remaining = status.getLength();
            for (int partNumber = 1; remaining >= 0; partNumber++) {
                final long length = Math.min(Math.max((size / (MAXIMUM_UPLOAD_PARTS - 1)), partsize), remaining);
                final PresignedUrl presignedUrl = presignedUrls.get(partNumber - 1);
                if (SDSNodeIdProvider.isEncrypted(file)) {
                    final Local temporary = temp.create(String.format("%s-%d", random, partNumber));
                    if (log.isDebugEnabled()) {
                        log.debug(String.format("Encrypted contents for part %d to %s", partNumber, temporary));
                    }
                    new StreamCopier(status, StreamProgress.noop).withAutoclose(false).withLimit(length).transfer(in, new BufferOutputStream(new FileBuffer(temporary)));
                    parts.add(this.submit(pool, file, temporary, throttle, listener, status, presignedUrl.getUrl(), presignedUrl.getPartNumber(), 0L, length, callback));
                } else {
                    parts.add(this.submit(pool, file, local, throttle, listener, status, presignedUrl.getUrl(), presignedUrl.getPartNumber(), offset, length, callback));
                }
                remaining -= length;
                offset += length;
                if (0L == remaining) {
                    break;
                }
            }
        } finally {
            in.close();
        }
        for (Future<TransferStatus> future : parts) {
            try {
                final TransferStatus part = future.get();
                etags.put(part.getPart(), part);
            } catch (InterruptedException e) {
                log.error("Part upload failed with interrupt failure");
                status.setCanceled();
                throw new ConnectionCanceledException(e);
            } catch (ExecutionException e) {
                log.warn(String.format("Part upload failed with execution failure %s", e.getMessage()));
                if (e.getCause() instanceof BackgroundException) {
                    throw (BackgroundException) e.getCause();
                }
                throw new BackgroundException(e.getCause());
            }
        }
        final CompleteS3FileUploadRequest completeS3FileUploadRequest = new CompleteS3FileUploadRequest().keepShareLinks(status.isExists() ? new HostPreferences(session.getHost()).getBoolean("sds.upload.sharelinks.keep") : false).resolutionStrategy(status.isExists() ? CompleteS3FileUploadRequest.ResolutionStrategyEnum.OVERWRITE : CompleteS3FileUploadRequest.ResolutionStrategyEnum.FAIL);
        if (status.getFilekey() != null) {
            final ObjectReader reader = session.getClient().getJSON().getContext(null).readerFor(FileKey.class);
            final FileKey fileKey = reader.readValue(status.getFilekey().array());
            final EncryptedFileKey encryptFileKey = Crypto.encryptFileKey(TripleCryptConverter.toCryptoPlainFileKey(fileKey), TripleCryptConverter.toCryptoUserPublicKey(session.keyPair().getPublicKeyContainer()));
            completeS3FileUploadRequest.setFileKey(TripleCryptConverter.toSwaggerFileKey(encryptFileKey));
        }
        etags.forEach((key, value) -> completeS3FileUploadRequest.addPartsItem(new S3FileUploadPart().partEtag(value.getChecksum().hash).partNumber(key)));
        if (log.isDebugEnabled()) {
            log.debug(String.format("Complete file upload with %s for %s", completeS3FileUploadRequest, file));
        }
        new NodesApi(session.getClient()).completeS3FileUpload(completeS3FileUploadRequest, createFileUploadResponse.getUploadId(), StringUtils.EMPTY);
        // Polling
        final ScheduledThreadPool polling = new ScheduledThreadPool();
        final CountDownLatch done = new CountDownLatch(1);
        final AtomicReference<BackgroundException> failure = new AtomicReference<>();
        final ScheduledFuture f = polling.repeat(new Runnable() {

            @Override
            public void run() {
                try {
                    if (log.isDebugEnabled()) {
                        log.debug(String.format("Query upload status for %s", createFileUploadResponse));
                    }
                    final S3FileUploadStatus uploadStatus = new NodesApi(session.getClient()).requestUploadStatusFiles(createFileUploadResponse.getUploadId(), StringUtils.EMPTY, null);
                    switch(uploadStatus.getStatus()) {
                        case "finishing":
                            // Expected
                            break;
                        case "transfer":
                            failure.set(new InteroperabilityException(uploadStatus.getStatus()));
                            done.countDown();
                            break;
                        case "error":
                            failure.set(new InteroperabilityException(uploadStatus.getErrorDetails().getMessage()));
                            done.countDown();
                            break;
                        case "done":
                            // Set node id in transfer status
                            nodeid.cache(file, String.valueOf(uploadStatus.getNode().getId()));
                            // Mark parent status as complete
                            status.withResponse(new SDSAttributesAdapter(session).toAttributes(uploadStatus.getNode())).setComplete();
                            done.countDown();
                            break;
                    }
                } catch (ApiException e) {
                    failure.set(new SDSExceptionMappingService(nodeid).map("Upload {0} failed", e, file));
                    done.countDown();
                }
            }
        }, new HostPreferences(session.getHost()).getLong("sds.upload.s3.status.period"), TimeUnit.MILLISECONDS);
        Uninterruptibles.awaitUninterruptibly(done);
        polling.shutdown();
        if (null != failure.get()) {
            throw failure.get();
        }
        return null;
    } catch (CryptoSystemException | InvalidFileKeyException | InvalidKeyPairException | UnknownVersionException e) {
        throw new TripleCryptExceptionMappingService().map("Upload {0} failed", e, file);
    } catch (ApiException e) {
        throw new SDSExceptionMappingService(nodeid).map("Upload {0} failed", e, file);
    } catch (IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    } finally {
        temp.shutdown();
        // Cancel future tasks
        pool.shutdown(false);
    }
}
Also used : DisabledListProgressListener(ch.cyberduck.core.DisabledListProgressListener) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) DateTime(org.joda.time.DateTime) NodesApi(ch.cyberduck.core.sds.io.swagger.client.api.NodesApi) PresignedUrl(ch.cyberduck.core.sds.io.swagger.client.model.PresignedUrl) ScheduledThreadPool(ch.cyberduck.core.threading.ScheduledThreadPool) CreateFileUploadResponse(ch.cyberduck.core.sds.io.swagger.client.model.CreateFileUploadResponse) InvalidFileKeyException(com.dracoon.sdk.crypto.error.InvalidFileKeyException) S3FileUploadStatus(ch.cyberduck.core.sds.io.swagger.client.model.S3FileUploadStatus) ConnectionCanceledException(ch.cyberduck.core.exception.ConnectionCanceledException) Local(ch.cyberduck.core.Local) HostPreferences(ch.cyberduck.core.preferences.HostPreferences) CreateFileUploadRequest(ch.cyberduck.core.sds.io.swagger.client.model.CreateFileUploadRequest) UnknownVersionException(com.dracoon.sdk.crypto.error.UnknownVersionException) BackgroundException(ch.cyberduck.core.exception.BackgroundException) EncryptedFileKey(com.dracoon.sdk.crypto.model.EncryptedFileKey) FileBuffer(ch.cyberduck.core.io.FileBuffer) ThreadPool(ch.cyberduck.core.threading.ThreadPool) BufferOutputStream(ch.cyberduck.core.io.BufferOutputStream) InvalidKeyPairException(com.dracoon.sdk.crypto.error.InvalidKeyPairException) TransferStatus(ch.cyberduck.core.transfer.TransferStatus) ObjectReader(com.fasterxml.jackson.databind.ObjectReader) ExecutionException(java.util.concurrent.ExecutionException) FileKey(ch.cyberduck.core.sds.io.swagger.client.model.FileKey) InteroperabilityException(ch.cyberduck.core.exception.InteroperabilityException) InputStream(java.io.InputStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) UUIDRandomStringService(ch.cyberduck.core.UUIDRandomStringService) ScheduledFuture(java.util.concurrent.ScheduledFuture) S3FileUploadPart(ch.cyberduck.core.sds.io.swagger.client.model.S3FileUploadPart) Future(java.util.concurrent.Future) TripleCryptExceptionMappingService(ch.cyberduck.core.sds.triplecrypt.TripleCryptExceptionMappingService) DefaultIOExceptionMappingService(ch.cyberduck.core.DefaultIOExceptionMappingService) StreamCopier(ch.cyberduck.core.io.StreamCopier) CompleteS3FileUploadRequest(ch.cyberduck.core.sds.io.swagger.client.model.CompleteS3FileUploadRequest) ApiException(ch.cyberduck.core.sds.io.swagger.client.ApiException) CryptoSystemException(com.dracoon.sdk.crypto.error.CryptoSystemException)
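
One detail worth isolating from the upload method above is how the per-part futures are collected: an InterruptedException while waiting for a part is reported as ConnectionCanceledException, while an ExecutionException is unwrapped into a BackgroundException. The helper below is a generic restatement of that pattern under the same exception types; the class and method names are invented and it is not the project's actual code.

import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import ch.cyberduck.core.exception.BackgroundException;
import ch.cyberduck.core.exception.ConnectionCanceledException;

// Generic restatement of the future-collection pattern shown above.
public final class PartResultCollector {

    private PartResultCollector() {
        //
    }

    public static <T> void awaitAll(final List<Future<T>> parts) throws BackgroundException {
        for (Future<T> future : parts) {
            try {
                // Block until the part upload finishes
                future.get();
            }
            catch (InterruptedException e) {
                // Treat an interrupt as a cancellation, as the upload loop above does
                throw new ConnectionCanceledException(e);
            }
            catch (ExecutionException e) {
                // Surface the underlying failure, unwrapping BackgroundException when present
                if (e.getCause() instanceof BackgroundException) {
                    throw (BackgroundException) e.getCause();
                }
                throw new BackgroundException(e.getCause());
            }
        }
    }
}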

Aggregations

ConnectionCanceledException (ch.cyberduck.core.exception.ConnectionCanceledException): 66
Path (ch.cyberduck.core.Path): 28
BackgroundException (ch.cyberduck.core.exception.BackgroundException): 17
ArrayList (java.util.ArrayList): 16
TransferStatus (ch.cyberduck.core.transfer.TransferStatus): 12
IOException (java.io.IOException): 11
ExecutionException (java.util.concurrent.ExecutionException): 11
Future (java.util.concurrent.Future): 11
ThreadPool (ch.cyberduck.core.threading.ThreadPool): 10
Test (org.junit.Test): 9
ListService (ch.cyberduck.core.ListService): 8
PathContainerService (ch.cyberduck.core.PathContainerService): 8
DisabledListProgressListener (ch.cyberduck.core.DisabledListProgressListener): 7
HashMap (java.util.HashMap): 7
DefaultIOExceptionMappingService (ch.cyberduck.core.DefaultIOExceptionMappingService): 6
IntegrationTest (ch.cyberduck.test.IntegrationTest): 6
AttributedList (ch.cyberduck.core.AttributedList): 5
Host (ch.cyberduck.core.Host): 5
LinkedHashMap (java.util.LinkedHashMap): 5
ChecksumException (ch.cyberduck.core.exception.ChecksumException): 4