use of com.github.sardine.DavResource in project core-geonetwork by geonetwork.
the class WebDavRetriever method retrieve.
public List<RemoteFile> retrieve() throws Exception {
    final HttpClientBuilder clientBuilder = context.getBean(GeonetHttpRequestFactory.class).getDefaultHttpClientBuilder();
    Lib.net.setupProxy(context, clientBuilder, new URL(params.url).getHost());
    if (params.isUseAccount()) {
        this.sardine = new SardineImpl(clientBuilder, params.getUsername(), params.getPassword());
    } else {
        this.sardine = new SardineImpl(clientBuilder);
    }
    files.clear();
    String url = params.url;
    if (!url.endsWith("/")) {
        if (log.isDebugEnabled()) {
            log.debug("URL " + url + " does not end in a slash -- one will be appended");
        }
        url += "/";
    }
    final List<DavResource> resources = open(url);
    url = calculateBaseURL(cancelMonitor, url, resources);
    for (DavResource resource : resources) {
        if (cancelMonitor.get()) {
            return Collections.emptyList();
        }
        retrieveFile(url, resource);
    }
    return files;
}
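Here open(url) and calculateBaseURL(...) are private helpers of the same class that are not part of this excerpt; the listing step presumably delegates to sardine.list(url). A minimal, self-contained sketch of that listing step, with a hypothetical URL and credentials (SardineFactory.begin and Sardine.list are the standard public Sardine entry points), might look like this:

import com.github.sardine.DavResource;
import com.github.sardine.Sardine;
import com.github.sardine.SardineFactory;
import java.util.List;

public class WebDavListingSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint and credentials, for illustration only.
        Sardine sardine = SardineFactory.begin("user", "password");
        List<DavResource> resources = sardine.list("https://example.org/webdav/metadata/");
        for (DavResource resource : resources) {
            // Each entry describes one collection member, including the collection itself.
            System.out.printf("%s (directory=%s, %s bytes)%n",
                    resource.getPath(), resource.isDirectory(), resource.getContentLength());
        }
    }
}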
use of com.github.sardine.DavResource in project core-geonetwork by geonetwork.
the class WebDavRetriever method retrieveFile.
private void retrieveFile(String baseURL, DavResource davResource) throws IOException {
    if (this.cancelMonitor.get()) {
        files.clear();
        return;
    }
    String path = davResource.getPath();
    int startSize = files.size();
    if (davResource.isDirectory()) {
        // it is a directory
        if (params.recurse) {
            if (log.isDebugEnabled()) {
                log.debug(path + " is a collection, processed recursively");
            }
            for (DavResource resource : sardine.list(baseURL + path)) {
                if (!resource.getHref().equals(davResource.getHref())) {
                    retrieveFile(baseURL, resource);
                }
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug(path + " is a collection. Ignoring because recursion is disabled.");
            }
        }
    } else {
        // it is a file
        if (log.isDebugEnabled()) {
            log.debug(path + " is not a collection");
        }
        final String name = davResource.getName();
        if (name.toLowerCase().endsWith(".xml")) {
            if (log.isDebugEnabled()) {
                log.debug("found xml file ! " + name.toLowerCase());
            }
            files.add(new WebDavRemoteFile(sardine, baseURL, davResource));
        } else {
            if (log.isDebugEnabled()) {
                log.debug(name.toLowerCase() + " is not an xml file");
            }
        }
    }
    int endSize = files.size();
    int added = endSize - startSize;
    if (added == 0) {
        if (log.isDebugEnabled()) {
            log.debug("No xml files found in path : " + path);
        }
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Found " + added + " xml file(s) in path : " + path);
        }
    }
}
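The getHref() comparison inside the recursion is needed because Sardine's list(...) on a collection also returns an entry for the collection itself; without that check retrieveFile would recurse into the same directory indefinitely. As a rough illustration of the same walk-and-filter pattern outside the harvester (hypothetical class; the self-check is simplified to a path suffix test instead of the href comparison used above), a sketch could be:

import com.github.sardine.DavResource;
import com.github.sardine.Sardine;
import com.github.sardine.SardineFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class XmlResourceWalker {
    private final Sardine sardine = SardineFactory.begin();

    /** Recursively collects all resources under baseUrl whose name ends in ".xml". */
    public List<DavResource> collectXml(String baseUrl) throws IOException {
        List<DavResource> result = new ArrayList<>();
        for (DavResource resource : sardine.list(baseUrl)) {
            if (resource.isDirectory()) {
                // Skip the entry that refers to baseUrl itself to avoid infinite recursion
                // (a simplification of the getHref() comparison in retrieveFile above).
                if (!baseUrl.endsWith(resource.getPath())) {
                    result.addAll(collectXml(baseUrl + resource.getName() + "/"));
                }
            } else if (resource.getName().toLowerCase().endsWith(".xml")) {
                result.add(resource);
            }
        }
        return result;
    }
}

The real method additionally tracks how many files were added for logging and honours the cancel monitor, which the sketch omits.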
use of com.github.sardine.DavResource in project ddf by codice.
the class DavAlterationObserverTest method testLeadingCreate.
@Test
public void testLeadingCreate() throws IOException {
    DavResource mockChild0 = mock(DavResource.class);
    DavEntry child0 = parent.newChildInstance("/child0");
    doReturn("/child0").when(mockChild0).getName();
    doReturn(false).when(mockChild0).isDirectory();
    doReturn(new Date()).when(mockChild0).getModified();
    doReturn(12345L).when(mockChild0).getContentLength();
    doReturn("E/0011").when(mockChild0).getEtag();
    doReturn(Arrays.asList(mockParent, mockChild1, mockChild0, mockChild3))
            .when(mockSardine).list(parent.getLocation());
    doReturn(Collections.singletonList(mockChild0)).when(mockSardine).list(child0.getLocation());
    observer.checkAndNotify(mockSardine);
    verify(mockListener, only()).onFileCreate(any());
}
use of com.github.sardine.DavResource in project ddf by codice.
the class DavAlterationObserverTest method testTrailingCreate.
@Test
public void testTrailingCreate() throws IOException {
    DavResource mockChild4 = mock(DavResource.class);
    DavEntry child4 = parent.newChildInstance("/child4");
    doReturn("/child4").when(mockChild4).getName();
    doReturn(false).when(mockChild4).isDirectory();
    doReturn(new Date()).when(mockChild4).getModified();
    doReturn(17L).when(mockChild4).getContentLength();
    doReturn("E/0005").when(mockChild4).getEtag();
    doReturn(Arrays.asList(mockParent, mockChild1, mockChild4, mockChild3))
            .when(mockSardine).list(parent.getLocation());
    doReturn(Collections.singletonList(mockChild4)).when(mockSardine).list(child4.getLocation());
    observer.checkAndNotify(mockSardine);
    verify(mockListener, only()).onFileCreate(any());
}
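Both tests stub a new DavResource with the same five calls (getName, isDirectory, getModified, getContentLength, getEtag); the shared fixture (parent, observer, mockSardine, mockListener, mockParent, mockChild1, mockChild3) is created in setup code that is not part of this excerpt. If more of these cases are added, the stubbing could be pulled into a small helper. The following is a hypothetical sketch, not part of the ddf test class:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import com.github.sardine.DavResource;
import java.util.Date;

final class DavResourceStubs {
    private DavResourceStubs() {
    }

    /** Builds a Mockito stub that looks like a plain (non-collection) WebDAV file. */
    static DavResource fileResource(String name, long contentLength, String etag) {
        DavResource resource = mock(DavResource.class);
        doReturn(name).when(resource).getName();
        doReturn(false).when(resource).isDirectory();
        doReturn(new Date()).when(resource).getModified();
        doReturn(contentLength).when(resource).getContentLength();
        doReturn(etag).when(resource).getEtag();
        return resource;
    }
}

Each test would then start with something like DavResource mockChild0 = DavResourceStubs.fileResource("/child0", 12345L, "E/0011");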
use of com.github.sardine.DavResource in project openhab1-addons by openhab.
the class EventReloaderJob method loadEvents.
/**
 * All events that are still available on the server are removed from the oldEventIds list,
 * so that only the IDs of deleted events remain.
 *
 * @param calendarRuntime the calendar runtime whose events are (re)loaded
 * @param oldEventIds filenames of previously loaded event containers; entries still present on the server are removed
 * @throws IOException
 * @throws ParserException
 */
public synchronized void loadEvents(final CalendarRuntime calendarRuntime, final List<String> oldEventIds)
        throws IOException, ParserException {
    CalDavConfig config = calendarRuntime.getConfig();
    Sardine sardine = Util.getConnection(config);
    List<DavResource> list = sardine.list(config.getUrl(), 1, false);
    log.trace("before load events : oldeventsid contains : {}", oldEventIds.toString());
    for (DavResource resource : list) {
        final String filename = Util.getFilename(resource.getName());
        try {
            if (resource.isDirectory()) {
                continue;
            }
            // An ics file can contain multiple events, so several event containers may share the same
            // filename (with different event IDs). Despite its name, oldEventIds holds the filenames of
            // previously loaded event containers (duplicates are possible). All entries for this filename
            // must be removed here, otherwise the remaining jobs for this file would be unscheduled by the
            // later removeDeletedEvents(config, oldEventIds) call.
            oldEventIds.removeAll(Arrays.asList(filename));
            // check whether this resource is already loaded and unchanged, in which case it must not be loaded again
            EventContainer eventContainer = calendarRuntime.getEventContainerByFilename(filename);
            final org.joda.time.DateTime lastResourceChangeFS = new org.joda.time.DateTime(resource.getModified());
            log.trace("eventContainer found: {}", eventContainer != null);
            log.trace("last resource modification: {}", lastResourceChangeFS);
            log.trace("last change of already loaded event: {}",
                    eventContainer != null ? eventContainer.getLastChanged() : null);
            if (config.isLastModifiedFileTimeStampValid()) {
                if (eventContainer != null && !lastResourceChangeFS.isAfter(eventContainer.getLastChanged())) {
                    // to be created
                    if (eventContainer.getCalculatedUntil() != null && eventContainer.getCalculatedUntil()
                            .isAfter(org.joda.time.DateTime.now().plusMinutes(config.getReloadMinutes()))) {
                        // the event is calculated as long as the next reload
                        // interval can handle this
                        log.trace("skipping resource {}, not changed (calculated until: {})", resource.getName(),
                                eventContainer.getCalculatedUntil());
                        continue;
                    }
                    if (eventContainer.isHistoricEvent()) {
                        // no more upcoming events, do nothing
                        log.trace("skipping resource {}, not changed (historic)", resource.getName());
                        continue;
                    }
                    File icsFile = Util.getCacheFile(config.getKey(), filename);
                    if (icsFile != null && icsFile.exists()) {
                        FileInputStream fis = new FileInputStream(icsFile);
                        this.loadEvents(filename, lastResourceChangeFS, fis, config, oldEventIds, false);
                        fis.close();
                        continue;
                    }
                }
            }
            log.debug("loading resource: {} (FSchangedTS not valid)", resource);
            // prepare resource url: escape slashes in the resource name and spaces in the final URL
            URL url = new URL(config.getUrl());
            String resourcePath = resource.getPath();
            String escapedResource = resource.getName().replaceAll("/", "%2F");
            resourcePath = resourcePath.replace(resource.getName(), escapedResource);
            url = new URL(url.getProtocol(), url.getHost(), url.getPort(), resourcePath);
            InputStream inputStream = sardine.get(url.toString().replaceAll(" ", "%20"));
            this.loadEvents(filename, lastResourceChangeFS, inputStream, config, oldEventIds, false);
        } catch (ParserException e) {
            log.error("error parsing ics file: " + filename, e);
        } catch (SardineException e) {
            log.error("error reading ics file: " + filename, e);
        }
    }
    log.trace("after load events : oldeventsid contains : {}", oldEventIds.toString());
}
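One detail worth noting in the download branch: the cached-file path closes its FileInputStream explicitly, but the stream returned by sardine.get(...) is never closed in this excerpt (it may well be closed inside the other loadEvents overload, which is not shown here). If it is not, wrapping the download in try-with-resources would be the idiomatic fix; a minimal sketch under that assumption, reusing the local variables from the method above:

// Assumes the nested loadEvents overload does not close the stream itself; url, filename,
// lastResourceChangeFS, config and oldEventIds are the locals from loadEvents above.
try (InputStream inputStream = sardine.get(url.toString().replaceAll(" ", "%20"))) {
    this.loadEvents(filename, lastResourceChangeFS, inputStream, config, oldEventIds, false);
}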