Use of org.jnbt.NBTInputStream in project WorldPainter by Captain-Chaos.
The class Bo3Object, method loadTileEntity.
private static TileEntity loadTileEntity(File bo3File, String nbtFileName) throws IOException {
File nbtFile = new File(bo3File.getParentFile(), nbtFileName);
try (NBTInputStream in = new NBTInputStream(new GZIPInputStream(new FileInputStream(nbtFile)))) {
CompoundTag tag = (CompoundTag) in.readTag();
Map<String, Tag> map = tag.getValue();
if ((map.size() == 1) && (map.values().iterator().next() instanceof CompoundTag)) {
// The root compound tag wraps a single nested compound tag; assume that nested tag contains the tile entity data
return TileEntity.fromNBT((CompoundTag) tag.getValue().values().iterator().next());
} else {
return TileEntity.fromNBT(tag);
}
}
}
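The pattern above, reading the root compound tag and unwrapping a single nested compound tag when one is present, can be exercised in isolation. Below is a minimal sketch of that jNBT read pattern; the file name and the printed output are illustrative assumptions, and it only lists the top-level tag names instead of building a TileEntity.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import org.jnbt.CompoundTag;
import org.jnbt.NBTInputStream;
import org.jnbt.Tag;

public class ReadNbtExample {
    public static void main(String[] args) throws IOException {
        File nbtFile = new File("example.nbt"); // hypothetical gzip-compressed NBT file
        try (NBTInputStream in = new NBTInputStream(new GZIPInputStream(new FileInputStream(nbtFile)))) {
            CompoundTag root = (CompoundTag) in.readTag();
            Map<String, Tag> values = root.getValue();
            // Mirror the single-child check above by listing the top-level tag names
            for (String tagName : values.keySet()) {
                System.out.println(tagName);
            }
        }
    }
}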
Use of org.jnbt.NBTInputStream in project WorldPainter by Captain-Chaos.
The class JavaWorldMerger, method mergeBiomes.
/**
* Merge only the biomes, leave everything else the same.
*/
public void mergeBiomes(File backupDir, ProgressReceiver progressReceiver) throws IOException, ProgressReceiver.OperationCancelled {
// Read existing level.dat file and perform sanity checks
Level level = performSanityChecks(true);
// Backup existing level
File worldDir = levelDatFile.getParentFile();
if (!worldDir.renameTo(backupDir)) {
throw new FileInUseException("Could not move " + worldDir + " to " + backupDir);
}
if (!worldDir.mkdirs()) {
throw new IOException("Could not create " + worldDir);
}
// Copy everything that we are not going to generate (this includes the
// Nether and End dimensions)
File[] files = backupDir.listFiles();
// noinspection ConstantConditions // Cannot happen because we previously loaded level.dat from it
for (File file : files) {
if ((!file.getName().equalsIgnoreCase("session.lock")) && (!file.getName().equalsIgnoreCase("region"))) {
if (file.isFile()) {
FileUtils.copyFileToDir(file, worldDir);
} else if (file.isDirectory()) {
FileUtils.copyDir(file, new File(worldDir, file.getName()));
} else {
logger.warn("Not copying " + file + "; not a regular file or directory");
}
}
}
// Write session.lock file
File sessionLockFile = new File(worldDir, "session.lock");
try (DataOutputStream sessionOut = new DataOutputStream(new FileOutputStream(sessionLockFile))) {
sessionOut.writeLong(System.currentTimeMillis());
}
// Process all chunks and copy just the biomes
if (progressReceiver != null) {
progressReceiver.setMessage("Merging biomes");
}
// Find all the region files of the existing level
File oldRegionDir = new File(backupDir, "region");
final Pattern regionFilePattern = Pattern.compile("r\\.-?\\d+\\.-?\\d+\\.mca");
File[] oldRegionFiles = oldRegionDir.listFiles((dir, name) -> regionFilePattern.matcher(name).matches());
// Process each region file, copying every chunk unmodified, except
// for the biomes
// A null result from listFiles() can only happen for corrupted maps
@SuppressWarnings("ConstantConditions") int totalChunkCount = oldRegionFiles.length * 32 * 32, chunkCount = 0;
File newRegionDir = new File(worldDir, "region");
newRegionDir.mkdirs();
Dimension dimension = world.getDimension(DIM_NORMAL);
for (File file : oldRegionFiles) {
try (RegionFile oldRegion = new RegionFile(file)) {
String[] parts = file.getName().split("\\.");
int regionX = Integer.parseInt(parts[1]);
int regionZ = Integer.parseInt(parts[2]);
File newRegionFile = new File(newRegionDir, "r." + regionX + "." + regionZ + ".mca");
try (RegionFile newRegion = new RegionFile(newRegionFile)) {
for (int x = 0; x < 32; x++) {
for (int z = 0; z < 32; z++) {
if (oldRegion.containsChunk(x, z)) {
ChunkImpl2 chunk;
try (NBTInputStream in = new NBTInputStream(oldRegion.getChunkDataInputStream(x, z))) {
CompoundTag tag = (CompoundTag) in.readTag();
chunk = new ChunkImpl2(tag, level.getMaxHeight());
}
int chunkX = chunk.getxPos(), chunkZ = chunk.getzPos();
for (int xx = 0; xx < 16; xx++) {
for (int zz = 0; zz < 16; zz++) {
chunk.setBiome(xx, zz, dimension.getLayerValueAt(Biome.INSTANCE, (chunkX << 4) | xx, (chunkZ << 4) | zz));
}
}
try (NBTOutputStream out = new NBTOutputStream(newRegion.getChunkDataOutputStream(x, z))) {
out.writeTag(chunk.toNBT());
}
}
chunkCount++;
if (progressReceiver != null) {
progressReceiver.setProgress((float) chunkCount / totalChunkCount);
}
}
}
}
}
}
// Rewrite session.lock file
try (DataOutputStream sessionOut = new DataOutputStream(new FileOutputStream(sessionLockFile))) {
sessionOut.writeLong(System.currentTimeMillis());
}
}
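mergeBiomes derives region coordinates from the r.<x>.<z>.mca file name, addresses 32 x 32 chunks per region and 16 x 16 block columns per chunk, and combines the pieces with shifts and bitwise ORs. A minimal, self-contained sketch of that coordinate arithmetic, using an assumed file name and assumed chunk and column indices:

public class RegionCoordsExample {
    public static void main(String[] args) {
        String fileName = "r.-2.3.mca"; // hypothetical region file name
        String[] parts = fileName.split("\\.");
        int regionX = Integer.parseInt(parts[1]); // -2
        int regionZ = Integer.parseInt(parts[2]); // 3
        int xInRegion = 5, zInRegion = 17;        // chunk indices within the region (0..31)
        int chunkX = (regionX << 5) | xInRegion;  // absolute chunk coordinates
        int chunkZ = (regionZ << 5) | zInRegion;
        int blockX = (chunkX << 4) | 9;           // absolute block coordinates of column (9, 12)
        int blockZ = (chunkZ << 4) | 12;
        System.out.println("chunk " + chunkX + "," + chunkZ + " -> block " + blockX + "," + blockZ);
    }
}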
Use of org.jnbt.NBTInputStream in project WorldPainter by Captain-Chaos.
The class JavaWorldMerger, method thirdPass.
private String thirdPass(MinecraftWorld minecraftWorld, File oldRegionDir, Dimension dimension, Point regionCoords, ProgressReceiver progressReceiver) throws IOException, ProgressReceiver.OperationCancelled {
if (progressReceiver != null) {
progressReceiver.setMessage("Merging existing blocks with new");
}
int lowestChunkX = (regionCoords.x << 5) - 1;
int highestChunkX = (regionCoords.x << 5) + 32;
int lowestChunkY = (regionCoords.y << 5) - 1;
int highestChunkY = (regionCoords.y << 5) + 32;
Platform platform = dimension.getWorld().getPlatform();
int maxHeight = dimension.getMaxHeight();
Map<Point, RegionFile> regionFiles = new HashMap<>();
Set<Point> damagedRegions = new HashSet<>();
StringBuilder reportBuilder = new StringBuilder();
try {
int chunkNo = 0;
for (int chunkX = lowestChunkX; chunkX <= highestChunkX; chunkX++) {
for (int chunkY = lowestChunkY; chunkY <= highestChunkY; chunkY++) {
chunkNo++;
if (progressReceiver != null) {
progressReceiver.setProgress((float) chunkNo / 1156);
}
Chunk newChunk;
if (dimension.getTile(chunkX >> 3, chunkY >> 3) == null) {
// The tile for this chunk does not exist in the new
// world, so the chunk from the existing world should
// be copied
newChunk = null;
} else {
newChunk = minecraftWorld.getChunk(chunkX, chunkY);
}
if (replaceChunks && (newChunk != null)) {
// Chunk replacement was requested and a new chunk exists, so keep the new chunk as is
continue;
}
int regionX = chunkX >> 5;
int regionY = chunkY >> 5;
Point coords = new Point(regionX, regionY);
if (damagedRegions.contains(coords)) {
// This region was already found to be damaged; it was reported and logged earlier
continue;
}
RegionFile regionFile = regionFiles.get(coords);
if (regionFile == null) {
File file = new File(oldRegionDir, "r." + regionX + "." + regionY + (platform.equals(DefaultPlugin.JAVA_ANVIL) ? ".mca" : ".mcr"));
try {
regionFile = new RegionFile(file);
regionFiles.put(coords, regionFile);
} catch (IOException e) {
reportBuilder.append("I/O error while opening region file " + file + " (message: \"" + e.getMessage() + "\"); skipping region" + EOL);
logger.error("I/O error while opening region file " + file + "; skipping region", e);
damagedRegions.add(coords);
continue;
}
}
int chunkXInRegion = chunkX & 0x1f;
int chunkYInRegion = chunkY & 0x1f;
if (regionFile.containsChunk(chunkXInRegion, chunkYInRegion)) {
Tag tag;
try {
DataInputStream chunkData = regionFile.getChunkDataInputStream(chunkXInRegion, chunkYInRegion);
if (chunkData == null) {
// This should never happen, since we checked with
// containsChunk(), but in practice it does. Perhaps
// corrupted data?
reportBuilder.append("Missing chunk data in existing map for chunk " + chunkXInRegion + ", " + chunkYInRegion + " in " + regionFile + "; skipping chunk" + EOL);
logger.warn("Missing chunk data in existing map for chunk " + chunkXInRegion + ", " + chunkYInRegion + " in " + regionFile + "; skipping chunk");
continue;
}
try (NBTInputStream in = new NBTInputStream(chunkData)) {
tag = in.readTag();
}
} catch (IOException e) {
reportBuilder.append("I/O error while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("I/O error while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + "; skipping chunk", e);
continue;
} catch (IllegalArgumentException e) {
reportBuilder.append("Illegal argument exception while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("Illegal argument exception while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + "; skipping chunk", e);
continue;
}
Chunk existingChunk = platform.equals(DefaultPlugin.JAVA_ANVIL) ? new ChunkImpl2((CompoundTag) tag, maxHeight) : new ChunkImpl((CompoundTag) tag, maxHeight);
if (newChunk != null) {
// Chunk exists in existing and new world; merge it
// Do any necessary processing of the existing chunk
// (clearing trees, etc.) No need to check for
// read-only; if the chunk was read-only it
// wouldn't exist in the new map and we wouldn't
// be here
processExistingChunk(existingChunk);
try {
newChunk = mergeChunk(existingChunk, newChunk, dimension);
minecraftWorld.addChunk(newChunk);
} catch (NullPointerException e) {
reportBuilder.append("Null pointer exception while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + "; skipping chunk" + EOL);
logger.error("Null pointer exception while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + "; skipping chunk", e);
continue;
} catch (ArrayIndexOutOfBoundsException e) {
reportBuilder.append("Array index out of bounds while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("Array index out of bounds while reading chunk in existing map " + chunkXInRegion + ", " + chunkYInRegion + " from file " + regionFile + "; skipping chunk", e);
continue;
}
} else {
// Chunk exists in existing world, but not in new
// one, copy old to new
minecraftWorld.addChunk(existingChunk);
}
}
}
}
} finally {
for (RegionFile regionFile : regionFiles.values()) {
regionFile.close();
}
}
if (progressReceiver != null) {
progressReceiver.setProgress(1.0f);
}
return reportBuilder.length() != 0 ? reportBuilder.toString() : null;
}
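thirdPass maps absolute chunk coordinates to a region with an arithmetic right shift (chunkX >> 5) and to an index within that region with a bit mask (chunkX & 0x1f), which also works for the negative coordinates produced by scanning one chunk beyond the region border. A minimal sketch demonstrating that mapping:

public class ChunkToRegionExample {
    public static void main(String[] args) {
        int[] chunkCoords = {0, 31, 32, -1, -33};
        for (int chunkX : chunkCoords) {
            int regionX = chunkX >> 5;     // floor division by 32, also for negative values
            int xInRegion = chunkX & 0x1f; // always in the range 0..31
            System.out.println("chunk " + chunkX + " -> region " + regionX + ", index " + xInRegion);
        }
        // For example chunk -1 lands in region -1 at index 31, which is why the
        // merge loop above can safely extend one chunk past each region border.
    }
}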
Use of org.jnbt.NBTInputStream in project WorldPainter by Captain-Chaos.
The class Schematic, method load.
/**
* Load a custom object in schematic format from an input stream. The stream
* is closed before exiting the method.
*
* @param name The name of the object.
* @param stream The input stream from which to load the object.
* @return A new <code>Schematic</code> containing the contents of the
* specified stream.
* @throws IOException If an I/O error occurred while reading the stream.
*/
public static Schematic load(String name, InputStream stream) throws IOException {
InputStream in = new BufferedInputStream(stream);
// noinspection TryFinallyCanBeTryWithResources // Not possible due to assignment of 'in' inside block
try {
byte[] magicNumber = new byte[2];
in.mark(2);
in.read(magicNumber);
in.reset();
if ((magicNumber[0] == (byte) 0x1f) && (magicNumber[1] == (byte) 0x8b)) {
in = new GZIPInputStream(in);
}
NBTInputStream nbtIn = new NBTInputStream(in);
CompoundTag tag = (CompoundTag) nbtIn.readTag();
return new Schematic(name, tag, null);
} finally {
in.close();
}
}
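load() detects gzip compression by buffering the stream, peeking at the first two bytes with mark() and reset(), and wrapping the stream in a GZIPInputStream only when the gzip magic number 0x1f 0x8b is present. A minimal sketch of that technique, assuming a hypothetical file name; unlike the method above it also checks the number of bytes actually read:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

public class GzipDetectExample {
    static InputStream maybeGunzip(InputStream stream) throws IOException {
        InputStream in = new BufferedInputStream(stream);
        byte[] magic = new byte[2];
        in.mark(2);
        int bytesRead = in.read(magic);
        in.reset();
        if ((bytesRead == 2) && (magic[0] == (byte) 0x1f) && (magic[1] == (byte) 0x8b)) {
            return new GZIPInputStream(in);
        }
        return in;
    }

    public static void main(String[] args) throws IOException {
        // "object.schematic" is an illustrative assumption
        try (InputStream in = maybeGunzip(new FileInputStream("object.schematic"))) {
            System.out.println("First byte of the (possibly decompressed) data: " + in.read());
        }
    }
}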
Use of org.jnbt.NBTInputStream in project WorldPainter by Captain-Chaos.
The class JavaMapImporter, method importDimension.
private String importDimension(File regionDir, Dimension dimension, int version, ProgressReceiver progressReceiver) throws IOException, ProgressReceiver.OperationCancelled {
if (progressReceiver != null) {
progressReceiver.setMessage(dimension.getName() + " dimension");
}
final int maxHeight = dimension.getMaxHeight();
final int maxY = maxHeight - 1;
final Pattern regionFilePattern = (version == SUPPORTED_VERSION_1) ? Pattern.compile("r\\.-?\\d+\\.-?\\d+\\.mcr") : Pattern.compile("r\\.-?\\d+\\.-?\\d+\\.mca");
final File[] regionFiles = regionDir.listFiles((dir, name) -> regionFilePattern.matcher(name).matches());
if ((regionFiles == null) || (regionFiles.length == 0)) {
throw new RuntimeException("The " + dimension.getName() + " dimension of this map has no region files!");
}
final Set<Point> newChunks = new HashSet<>();
// final SortedSet<Material> manMadeBlockTypes = new TreeSet<Material>();
final boolean importBiomes = (version == SUPPORTED_VERSION_2) && (dimension.getDim() == DIM_NORMAL);
final int total = regionFiles.length * 1024;
int count = 0;
final StringBuilder reportBuilder = new StringBuilder();
for (File file : regionFiles) {
try {
RegionFile regionFile = new RegionFile(file);
try {
for (int x = 0; x < 32; x++) {
for (int z = 0; z < 32; z++) {
if (progressReceiver != null) {
progressReceiver.setProgress((float) count / total);
count++;
}
final Point chunkCoords = new Point((regionFile.getX() << 5) | x, (regionFile.getZ() << 5) | z);
if ((chunksToSkip != null) && chunksToSkip.contains(chunkCoords)) {
continue;
}
if (regionFile.containsChunk(x, z)) {
final Tag tag;
try {
final InputStream chunkData = regionFile.getChunkDataInputStream(x, z);
if (chunkData == null) {
// This should never happen, since we checked
// with containsChunk(), but in practice it
// does. Perhaps corrupted data?
reportBuilder.append("Missing chunk data for chunk " + x + ", " + z + " in " + file + "; skipping chunk" + EOL);
logger.warn("Missing chunk data for chunk " + x + ", " + z + " in " + file + "; skipping chunk");
continue;
}
try (NBTInputStream in = new NBTInputStream(chunkData)) {
tag = in.readTag();
}
} catch (IOException e) {
reportBuilder.append("I/O error while reading chunk " + x + ", " + z + " from file " + file + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("I/O error while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk", e);
continue;
} catch (IllegalArgumentException e) {
reportBuilder.append("Illegal argument exception while reading chunk " + x + ", " + z + " from file " + file + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("Illegal argument exception while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk", e);
continue;
} catch (NegativeArraySizeException e) {
reportBuilder.append("Negative array size exception while reading chunk " + x + ", " + z + " from file " + file + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("Negative array size exception while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk", e);
continue;
}
final Chunk chunk = (version == SUPPORTED_VERSION_1) ? new ChunkImpl((CompoundTag) tag, maxHeight) : new ChunkImpl2((CompoundTag) tag, maxHeight);
final Point tileCoords = new Point(chunk.getxPos() >> 3, chunk.getzPos() >> 3);
Tile tile = dimension.getTile(tileCoords);
if (tile == null) {
tile = dimension.getTileFactory().createTile(tileCoords.x, tileCoords.y);
for (int xx = 0; xx < 8; xx++) {
for (int yy = 0; yy < 8; yy++) {
newChunks.add(new Point((tileCoords.x << TILE_SIZE_BITS) | (xx << 4), (tileCoords.y << TILE_SIZE_BITS) | (yy << 4)));
}
}
dimension.addTile(tile);
}
newChunks.remove(new Point(chunk.getxPos() << 4, chunk.getzPos() << 4));
boolean manMadeStructuresBelowGround = false;
boolean manMadeStructuresAboveGround = false;
try {
for (int xx = 0; xx < 16; xx++) {
for (int zz = 0; zz < 16; zz++) {
float height = -1.0f;
int waterLevel = 0;
boolean floodWithLava = false, frost = false;
Terrain terrain = Terrain.BEDROCK;
for (int y = maxY; y >= 0; y--) {
int blockType = chunk.getBlockType(xx, y, zz);
int data = chunk.getDataValue(xx, y, zz);
if (!NATURAL_BLOCKS.get(blockType)) {
if (height == -1.0f) {
manMadeStructuresAboveGround = true;
} else {
manMadeStructuresBelowGround = true;
}
// manMadeBlockTypes.add(Material.get(blockType, data));
}
if ((blockType == BLK_SNOW) || (blockType == BLK_ICE)) {
frost = true;
}
if (((blockType == BLK_ICE) || (blockType == BLK_FROSTED_ICE) || (((blockType == BLK_STATIONARY_WATER) || (blockType == BLK_WATER) || (blockType == BLK_STATIONARY_LAVA) || (blockType == BLK_LAVA)) && (data == 0))) && (waterLevel == 0)) {
waterLevel = y;
if ((blockType == BLK_LAVA) || (blockType == BLK_STATIONARY_LAVA)) {
floodWithLava = true;
}
} else if (height == -1.0f) {
final Material material = Material.get(blockType, data);
if (SPECIAL_TERRAIN_MAPPING.containsKey(material)) {
// Special terrain found
// Value that falls in the middle of the lowest one eighth, which will still round to the same integer value and will receive a one layer thick smooth snow block (principle of least surprise)
height = y - 0.4375f;
terrain = SPECIAL_TERRAIN_MAPPING.get(material);
} else if (TERRAIN_MAPPING.containsKey(blockType)) {
// Terrain found
// Value that falls in the middle of the lowest one eighth, which will still round to the same integer value and will receive a one layer thick smooth snow block (principle of least surprise)
height = y - 0.4375f;
terrain = TERRAIN_MAPPING.get(blockType);
}
}
}
// Use smooth snow, if present, to better approximate world height, so smooth snow will survive merge
final int intHeight = (int) (height + 0.5f);
if ((height != -1.0f) && (intHeight < maxY) && (chunk.getBlockType(xx, intHeight + 1, zz) == BLK_SNOW)) {
int data = chunk.getDataValue(xx, intHeight + 1, zz);
height += data * 0.125;
}
if ((waterLevel == 0) && (height >= 61.5f)) {
waterLevel = 62;
}
final int blockX = (chunk.getxPos() << 4) | xx;
final int blockY = (chunk.getzPos() << 4) | zz;
final Point coords = new Point(blockX, blockY);
dimension.setTerrainAt(coords, terrain);
dimension.setHeightAt(coords, Math.max(height, 0.0f));
dimension.setWaterLevelAt(blockX, blockY, waterLevel);
if (frost) {
dimension.setBitLayerValueAt(Frost.INSTANCE, blockX, blockY, true);
}
if (floodWithLava) {
dimension.setBitLayerValueAt(FloodWithLava.INSTANCE, blockX, blockY, true);
}
if (height == -1.0f) {
dimension.setBitLayerValueAt(org.pepsoft.worldpainter.layers.Void.INSTANCE, blockX, blockY, true);
}
if (importBiomes && chunk.isBiomesAvailable()) {
final int biome = chunk.getBiome(xx, zz);
// Only store the biome if it differs from the automatic biome, so that WorldPainter can still adjust the biome when the user makes changes
if ((biome != 255) && (biome != dimension.getAutoBiome(blockX, blockY))) {
dimension.setLayerValueAt(Biome.INSTANCE, blockX, blockY, biome);
}
}
}
}
} catch (NullPointerException e) {
reportBuilder.append("Null pointer exception while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk" + EOL);
logger.error("Null pointer exception while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk", e);
continue;
} catch (ArrayIndexOutOfBoundsException e) {
reportBuilder.append("Array index out of bounds while reading chunk " + x + ", " + z + " from file " + file + " (message: \"" + e.getMessage() + "\"); skipping chunk" + EOL);
logger.error("Array index out of bounds while reading chunk " + x + ", " + z + " from file " + file + "; skipping chunk", e);
continue;
}
if (((readOnlyOption == ReadOnlyOption.MAN_MADE) && (manMadeStructuresBelowGround || manMadeStructuresAboveGround)) || ((readOnlyOption == ReadOnlyOption.MAN_MADE_ABOVE_GROUND) && manMadeStructuresAboveGround) || (readOnlyOption == ReadOnlyOption.ALL)) {
dimension.setBitLayerValueAt(ReadOnly.INSTANCE, chunk.getxPos() << 4, chunk.getzPos() << 4, true);
}
}
}
}
} finally {
regionFile.close();
}
} catch (IOException e) {
reportBuilder.append("I/O error while opening region file " + file + " (message: \"" + e.getMessage() + "\"); skipping region" + EOL);
logger.error("I/O error while opening region file " + file + "; skipping region", e);
}
}
// Process chunks that were only added to fill out a tile
for (Point newChunkCoords : newChunks) {
dimension.setBitLayerValueAt(NotPresent.INSTANCE, newChunkCoords.x, newChunkCoords.y, true);
if (populateNewChunks) {
dimension.setBitLayerValueAt(Populate.INSTANCE, newChunkCoords.x, newChunkCoords.y, true);
}
}
if (progressReceiver != null) {
progressReceiver.setProgress(1.0f);
}
return reportBuilder.length() != 0 ? reportBuilder.toString() : null;
}
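All of the region-reading examples above follow the same basic sequence: open the RegionFile, check containsChunk(), obtain the chunk's DataInputStream, and read the root tag through an NBTInputStream. The sketch below isolates that sequence using only the calls that appear above; the import package for RegionFile, the file path, and the chunk indices are assumptions for illustration.

import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import org.jnbt.CompoundTag;
import org.jnbt.NBTInputStream;
import org.pepsoft.minecraft.RegionFile; // assumed package for WorldPainter's RegionFile

public class ReadChunkTagExample {
    public static void main(String[] args) throws IOException {
        File file = new File("region/r.0.0.mca"); // hypothetical region file
        try (RegionFile regionFile = new RegionFile(file)) {
            int x = 0, z = 0; // chunk indices within the region (0..31)
            if (regionFile.containsChunk(x, z)) {
                DataInputStream chunkData = regionFile.getChunkDataInputStream(x, z);
                if (chunkData != null) { // can be null for corrupted chunks, as noted above
                    try (NBTInputStream in = new NBTInputStream(chunkData)) {
                        CompoundTag tag = (CompoundTag) in.readTag();
                        System.out.println("Top-level tags: " + tag.getValue().keySet());
                    }
                }
            }
        }
    }
}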