Example use of java.nio.file.Files in the project component-runtime by Talend:
the method submitClasspath of the class BaseSpark.
/**
 * Variant of {@link BaseSpark#submit(Class, String...)} which computes the
 * {@code --jars} argument from the current context classpath, packaging
 * exploded folders into temporary jars on the fly so spark-submit can ship them.
 *
 * @param main
 * the main class to execute.
 * @param classpathFilter
 * selects which classpath entries are forwarded to spark.
 * @param args
 * potential arguments to pass to spark submit.
 */
public void submitClasspath(final Class<?> main, final Predicate<File> classpathFilter, final String... args) {
    final Set<File> classpathEntries;
    try {
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();
        classpathEntries = new UrlSet(ClassLoaders.findUrls(loader))
                .excludeJvm()
                .getUrls()
                .stream()
                .map(ClassLoaders::toFile)
                .collect(toSet());
    } catch (final IOException e) {
        throw new IllegalArgumentException(e);
    }
    final String jars = classpathEntries
            .stream()
            .filter(classpathFilter)
            .map(entry -> {
                if (!entry.isDirectory()) {
                    return entry.getAbsolutePath();
                }
                // exploded folder: bundle it into a jar (cached per directory) to let spark submit it
                return config.get().jarCache.computeIfAbsent(entry, dir -> {
                    final File bundle = new File(getRoot(), dir.getName() + "_generated_" + System.nanoTime() + ".jar");
                    try (final JarOutputStream out = new JarOutputStream(new FileOutputStream(bundle))) {
                        zip(dir, out, "");
                    } catch (final IOException e) {
                        fail(e.getMessage());
                    }
                    return bundle;
                }).getAbsolutePath();
            })
            .collect(joining(File.pathSeparator));
    final Stream<String> userArgs = args == null ? Stream.empty() : Stream.of(args);
    submit(main, Stream.concat(userArgs, Stream.of("--jars", jars)).toArray(String[]::new));
}
Example use of java.nio.file.Files in the project samza by Apache:
the method deleteUnusedStoresFromBlobStore of the class BlobStoreRestoreManager.
/**
 * Deletes blob store contents for stores that were present in the last checkpoint but are either no longer
 * present in job configs (removed by user since last deployment) or are no longer configured to be backed
 * up using blob stores.
 *
 * This method blocks until all the necessary store contents and snapshot index blobs have been marked for deletion.
 */
@VisibleForTesting
static void deleteUnusedStoresFromBlobStore(String jobName, String jobId, String taskName, StorageConfig storageConfig, BlobStoreConfig blobStoreConfig, Map<String, Pair<String, SnapshotIndex>> initialStoreSnapshotIndexes, BlobStoreUtil blobStoreUtil, ExecutorService executor) {
    String blobStoreFactory = BlobStoreStateBackendFactory.class.getName();
    List<String> backedUpStores = storageConfig.getStoresWithBackupFactory(blobStoreFactory);
    List<String> restoredStores = storageConfig.getStoresWithRestoreFactory(blobStoreFactory);
    List<CompletionStage<Void>> deletionFutures = new ArrayList<>();
    initialStoreSnapshotIndexes.forEach((storeName, scmAndSnapshotIndex) -> {
        if (backedUpStores.contains(storeName) || restoredStores.contains(storeName)) {
            // store is still backed up or restored via blob store; keep its contents
            return;
        }
        LOG.debug("Removing task: {} store: {} from blob store. It is either no longer used, "
            + "or is no longer configured to be backed up or restored with blob store.", taskName, storeName);
        DirIndex dirIndex = scmAndSnapshotIndex.getRight().getDirIndex();
        Metadata requestMetadata =
            new Metadata(Metadata.SNAPSHOT_INDEX_PAYLOAD_PATH, Optional.empty(), jobName, jobId, taskName, storeName);
        // 1. clean up files and sub-dirs previously marked for removal,
        // 2. delete the files and dirs still present in the dir index,
        // 3. finally delete the snapshot index blob itself.
        CompletionStage<Void> deletionFuture = blobStoreUtil
            .cleanUpDir(dirIndex, requestMetadata)
            .thenComposeAsync(v -> blobStoreUtil.deleteDir(dirIndex, requestMetadata), executor)
            .thenComposeAsync(v -> blobStoreUtil.deleteSnapshotIndexBlob(scmAndSnapshotIndex.getLeft(), requestMetadata), executor);
        deletionFutures.add(deletionFuture);
    });
    // block until every deletion has been accepted by the blob store
    FutureUtil.allOf(deletionFutures).join();
}
Example use of java.nio.file.Files in the project samza by Apache:
the method testRestoreDirRestoresMultiPartFilesCorrectly of the class TestBlobStoreUtil.
// Verifies that restoreDir reassembles a file stored as multiple blobs (one blob per byte here)
// into a single local file with the correct contents, metadata and checksum.
@Test
public void testRestoreDirRestoresMultiPartFilesCorrectly() throws IOException {
    Path restoreDirBasePath = Files.createTempDirectory(BlobStoreTestUtil.TEMP_DIR_PREFIX);
    // remote file == 26 blobs, blob ids from a to z, blob contents from a to z, offsets 0 to 25.
    DirIndex mockDirIndex = mock(DirIndex.class);
    when(mockDirIndex.getDirName()).thenReturn(DirIndex.ROOT_DIR_NAME);
    FileIndex mockFileIndex = mock(FileIndex.class);
    when(mockFileIndex.getFileName()).thenReturn("1.sst");
    // setup mock file attributes. create a temp file to get current user/group/permissions so that they
    // match with restored files.
    File tmpFile = Paths.get(restoreDirBasePath.toString(), "tempfile-" + new Random().nextInt()).toFile();
    tmpFile.createNewFile();
    PosixFileAttributes attrs = Files.readAttributes(tmpFile.toPath(), PosixFileAttributes.class);
    // ctime/mtime values do not matter for the comparison; size == 26 (one byte per blob)
    FileMetadata fileMetadata = new FileMetadata(1234L, 1243L, 26, attrs.owner().getName(), attrs.group().getName(), PosixFilePermissions.toString(attrs.permissions()));
    when(mockFileIndex.getFileMetadata()).thenReturn(fileMetadata);
    // delete so that it doesn't show up in restored dir contents.
    Files.delete(tmpFile.toPath());
    // build one mock blob per letter; the blob id doubles as the blob contents
    List<FileBlob> mockFileBlobs = new ArrayList<>();
    StringBuilder fileContents = new StringBuilder();
    for (int i = 0; i < 26; i++) {
        FileBlob mockFileBlob = mock(FileBlob.class);
        char c = (char) ('a' + i);
        // blob contents == blobId
        fileContents.append(c);
        when(mockFileBlob.getBlobId()).thenReturn(String.valueOf(c));
        when(mockFileBlob.getOffset()).thenReturn(i);
        mockFileBlobs.add(mockFileBlob);
    }
    when(mockFileIndex.getBlobs()).thenReturn(mockFileBlobs);
    // expected CRC32 of the fully reassembled file contents ("abc...z")
    CRC32 checksum = new CRC32();
    checksum.update(fileContents.toString().getBytes());
    when(mockFileIndex.getChecksum()).thenReturn(checksum.getValue());
    when(mockDirIndex.getFilesPresent()).thenReturn(ImmutableList.of(mockFileIndex));
    BlobStoreManager mockBlobStoreManager = mock(BlobStoreManager.class);
    // mock get(): writes the blob id into the provided stream and fsyncs so contents hit disk
    when(mockBlobStoreManager.get(anyString(), any(OutputStream.class), any(Metadata.class))).thenAnswer((Answer<CompletionStage<Void>>) invocationOnMock -> {
        String blobId = invocationOnMock.getArgumentAt(0, String.class);
        OutputStream outputStream = invocationOnMock.getArgumentAt(1, OutputStream.class);
        outputStream.write(blobId.getBytes());
        ((FileOutputStream) outputStream).getFD().sync();
        return CompletableFuture.completedFuture(null);
    });
    BlobStoreUtil blobStoreUtil = new BlobStoreUtil(mockBlobStoreManager, EXECUTOR, null, null);
    // NOTE(review): `metadata` is not defined in this method — presumably a shared test fixture field; verify.
    blobStoreUtil.restoreDir(restoreDirBasePath.toFile(), mockDirIndex, metadata).join();
    // restored dir must match the mock dir index exactly (contents, metadata, checksums)
    assertTrue(new DirDiffUtil().areSameDir(Collections.emptySet(), false).test(restoreDirBasePath.toFile(), mockDirIndex));
}
Example use of java.nio.file.Files in the project FXyzLib by Birdasaur:
the method start of the class CSVScatter3DTest.
/**
 * Builds the 3D scatter scene: camera rig, lighting, data loading from CSV,
 * keyboard/mouse navigation, and an animation timer that periodically
 * recolors (or rebuilds) the scatter mesh from a sequence of CSV frames.
 *
 * Fixes over the previous version: the three {@code Files.lines} streams are
 * now closed via try-with-resources (they keep the file handle open until
 * closed), swallowed IOExceptions are reported instead of silently dropped,
 * and an empty frame cache no longer throws from the animation timer.
 */
@Override
public void start(Stage primaryStage) throws Exception {
    Group sceneRoot = new Group();
    Scene scene = new Scene(sceneRoot, sceneWidth, sceneHeight, true, SceneAntialiasing.BALANCED);
    scene.setFill(Color.BLACK);
    camera = new PerspectiveCamera(true);
    // setup camera transform for rotational support
    cameraTransform.setTranslate(0, 0, 0);
    cameraTransform.getChildren().add(camera);
    camera.setNearClip(0.1);
    camera.setFarClip(10000.0);
    camera.setTranslateX(0);
    camera.setTranslateZ(-1000);
    cameraTransform.ry.setAngle(-25.0);
    cameraTransform.rx.setAngle(-10.0);
    // add a Point Light for better viewing of the grid coordinate system
    PointLight light = new PointLight(Color.WHITE);
    cameraTransform.getChildren().add(new AmbientLight());
    light.setTranslateX(camera.getTranslateX());
    light.setTranslateY(camera.getTranslateY());
    light.setTranslateZ(camera.getTranslateZ());
    scene.setCamera(camera);
    Group group = new Group(cameraTransform);
    List<Point3D> data = new ArrayList<>();
    // read the scatter points from a previously generated CSV file
    // (semicolon separated: x;y;z;f — see Point3D#toCSV()).
    Path out = getCSVFile(0);
    if (out != null) {
        // try-with-resources: Files.lines keeps the file open until the stream is closed
        try (Stream<String> lines = Files.lines(out)) {
            lines.map(s -> s.split(";")).forEach(s -> data.add(new Point3D(Float.parseFloat(s[0]), Float.parseFloat(s[1]), Float.parseFloat(s[2]), Float.parseFloat(s[3]))));
        }
    }
    ScatterMesh scatter = new ScatterMesh(data, true, 1, 0);
    // DENSITY
    // texture is given by p.f value, don't change this!
    scatter.setTextureModeVertices3D(1530, p -> p.f);
    group.getChildren().add(scatter);
    sceneRoot.getChildren().addAll(group);
    // First person shooter keyboard movement
    scene.setOnKeyPressed(event -> {
        double change = 10.0;
        // Add shift modifier to simulate "Running Speed"
        if (event.isShiftDown()) {
            change = 50.0;
        }
        // What key did the user press?
        KeyCode keycode = event.getCode();
        // Zoom controls
        if (keycode == KeyCode.W) {
            camera.setTranslateZ(camera.getTranslateZ() + change);
        }
        if (keycode == KeyCode.S) {
            camera.setTranslateZ(camera.getTranslateZ() - change);
        }
        // Strafe controls
        if (keycode == KeyCode.A) {
            camera.setTranslateX(camera.getTranslateX() - change);
        }
        if (keycode == KeyCode.D) {
            camera.setTranslateX(camera.getTranslateX() + change);
        }
    });
    scene.setOnMousePressed((MouseEvent me) -> {
        mousePosX = me.getSceneX();
        mousePosY = me.getSceneY();
        mouseOldX = me.getSceneX();
        mouseOldY = me.getSceneY();
    });
    scene.setOnMouseDragged((MouseEvent me) -> {
        mouseOldX = mousePosX;
        mouseOldY = mousePosY;
        mousePosX = me.getSceneX();
        mousePosY = me.getSceneY();
        mouseDeltaX = (mousePosX - mouseOldX);
        mouseDeltaY = (mousePosY - mouseOldY);
        double modifier = 10.0;
        double modifierFactor = 0.1;
        if (me.isControlDown()) {
            modifier = 0.1;
        }
        if (me.isShiftDown()) {
            modifier = 50.0;
        }
        if (me.isPrimaryButtonDown()) {
            // rotate; normalize angles into [-180, 180)
            cameraTransform.ry.setAngle(((cameraTransform.ry.getAngle() + mouseDeltaX * modifierFactor * modifier * 2.0) % 360 + 540) % 360 - 180);
            cameraTransform.rx.setAngle(((cameraTransform.rx.getAngle() - mouseDeltaY * modifierFactor * modifier * 2.0) % 360 + 540) % 360 - 180);
        } else if (me.isSecondaryButtonDown()) {
            // dolly zoom
            double z = camera.getTranslateZ();
            double newZ = z + mouseDeltaX * modifierFactor * modifier;
            camera.setTranslateZ(newZ);
        } else if (me.isMiddleButtonDown()) {
            // pan
            cameraTransform.t.setX(cameraTransform.t.getX() + mouseDeltaX * modifierFactor * modifier * 0.3);
            cameraTransform.t.setY(cameraTransform.t.getY() + mouseDeltaY * modifierFactor * modifier * 0.3);
        }
    });
    primaryStage.setTitle("F(X)yz - ScatterMesh Test");
    primaryStage.setScene(scene);
    primaryStage.show();
    final boolean constantVertices = true;
    lastEffect = System.nanoTime();
    AtomicInteger count = new AtomicInteger(0);
    List<List<Number>> fullData = new ArrayList<>();
    if (constantVertices) {
        // vertices never move, so cache the per-frame F values for all 8 frames up front
        Stream.of(0, 1, 2, 3, 4, 3, 2, 1).forEach(i -> {
            Path out2 = getCSVFile(i);
            if (out2 != null) {
                List<Number> data2 = new ArrayList<>();
                try (Stream<String> lines = Files.lines(out2)) {
                    lines.map(s -> s.split(";")).forEach(s -> {
                        float f = Float.parseFloat(s[3]);
                        // 4 vertices per tetrahedron share the same function value
                        data2.add(f);
                        data2.add(f);
                        data2.add(f);
                        data2.add(f);
                    });
                    fullData.add(data2);
                } catch (IOException ex) {
                    // best-effort: skip this frame but report the failure instead of swallowing it
                    ex.printStackTrace();
                }
            }
        });
    }
    AnimationTimer timerEffect = new AnimationTimer() {
        @Override
        public void handle(long now) {
            // throttle updates to one every 50ms
            if (now > lastEffect + 50_000_000L) {
                try {
                    if (constantVertices && !fullData.isEmpty()) {
                        // Vertices coordinates are always the same: mesh is the same, we only
                        // need to update F on each element (cycle over whatever frames loaded)
                        scatter.setFunctionData(fullData.get(count.get() % fullData.size()));
                    } else {
                        // vertices coordinates may change in time, we need to create them all over again reading the files:
                        Path out2 = getCSVFile((int) (Stream.of(0, 1, 2, 3, 4, 3, 2, 1).toArray()[count.get() % 8]));
                        if (out2 != null) {
                            List<Point3D> data2 = new ArrayList<>();
                            try (Stream<String> lines = Files.lines(out2)) {
                                lines.map(s -> s.split(";")).forEach(s -> data2.add(new Point3D(Float.parseFloat(s[0]), Float.parseFloat(s[1]), Float.parseFloat(s[2]), Float.parseFloat(s[3]))));
                            }
                            scatter.setScatterData(data2);
                            scatter.setTextureModeVertices1D(1530, p -> p);
                        }
                    }
                } catch (IOException ex) {
                    // report instead of silently dropping the frame update
                    ex.printStackTrace();
                }
                count.getAndIncrement();
                lastEffect = now;
            }
        }
    };
    timerEffect.start();
}
Example use of java.nio.file.Files in the project cassandra by Apache:
the method migrateSystemDataIfNeeded of the class CassandraDaemon.
/**
 * Checks if the data of the local system keyspaces need to be migrated to a different location,
 * and moves it when required.
 *
 * @throws IOException if listing, moving or deleting any of the directories fails
 */
public void migrateSystemDataIfNeeded() throws IOException {
    // With a single data location and no dedicated system-data directory the system data
    // cannot be misplaced, so there is nothing to migrate.
    if (!DatabaseDescriptor.useSpecificLocationForLocalSystemData() && DatabaseDescriptor.getNonLocalSystemKeyspacesDataFileLocations().length <= 1)
        return;
    // Several cases can occur:
    // 1) system data spread across the data file locations, to be moved to the first
    //    data location (upgrade to 4.0)
    // 2) system data spread across the data file locations, to be moved to the system
    //    keyspace location configured by the user (upgrade to 4.0)
    // 3) system data stored in the first data location, to be moved to the system keyspace
    //    location configured by the user (system_data_file_directory has been configured)
    Path target = Paths.get(DatabaseDescriptor.getLocalSystemKeyspacesDataFileLocations()[0]);
    String[] nonLocalLocations = DatabaseDescriptor.getNonLocalSystemKeyspacesDataFileLocations();
    // When a dedicated system location is configured every non-local location may hold stale
    // system data; otherwise the first location already is the target and is skipped.
    String[] sources = DatabaseDescriptor.useSpecificLocationForLocalSystemData()
                       ? nonLocalLocations
                       : Arrays.copyOfRange(nonLocalLocations, 1, nonLocalLocations.length);
    for (String source : sources)
    {
        Path dataFileLocation = Paths.get(source);
        if (!Files.exists(dataFileLocation))
            continue;
        try (Stream<Path> locationChildren = Files.list(dataFileLocation))
        {
            Path[] keyspaceDirectories = locationChildren.filter(p -> SchemaConstants.isLocalSystemKeyspace(p.getFileName().toString()))
                                                         .toArray(Path[]::new);
            for (Path keyspaceDirectory : keyspaceDirectories)
            {
                try (Stream<Path> keyspaceChildren = Files.list(keyspaceDirectory))
                {
                    // tables split across multiple disks must stay where they are
                    Path[] tableDirectories = keyspaceChildren.filter(Files::isDirectory)
                                                              .filter(p -> !SystemKeyspace.TABLES_SPLIT_ACROSS_MULTIPLE_DISKS.contains(p.getFileName().toString()))
                                                              .toArray(Path[]::new);
                    for (Path tableDirectory : tableDirectories)
                        FileUtils.moveRecursively(tableDirectory, target.resolve(dataFileLocation.relativize(tableDirectory)));
                    // keep the system keyspace directory itself; remove other keyspace dirs once emptied
                    if (!SchemaConstants.SYSTEM_KEYSPACE_NAME.equals(keyspaceDirectory.getFileName().toString()))
                        FileUtils.deleteDirectoryIfEmpty(keyspaceDirectory);
                }
            }
        }
    }
}
Aggregations