Verified Commit af301c3c authored by Brad Smith's avatar Brad Smith

prototype cli to extract biomes as image

parent 69ffc3bc
Pipeline #39270590 failed with stages
in 12 minutes and 33 seconds
"""Tools for interacting with Minecraft "mca" world region files."""
from . import biome, definitions, fileutils, region # noqa: F401
from .cli import main
# Entry point guard: delegate to the click CLI group when run as a script.
if __name__ == "__main__":
    main()
import numpy as np
from mcatools.region import calculate_chunk_bounds, Region
from mcatools.definitions import (
CHUNK_WIDTH_BLOCKS,
REGION_WIDTH_BLOCKS,
REGION_TOTAL_BLOCKS,
)
def biome_id_to_rgb(biome_id):
    """Map a numeric biome id to an RGB color as a length-3 uint8 array.

    Spreads ids linearly across the 24-bit color space so distinct ids get
    visually distinct colors.
    TODO: build a static color map with meaningful values.

    :param biome_id: numeric biome id, expected in 0..MAX_BIOME_ID.
    :return: np.ndarray of shape (3,), dtype uint8 — [R, G, B].
    """
    MAX_BIOME_ID = 180  # upper bound on ids we expect (see BiomeType values)
    MAX_RGB = 0xFFFFFF  # 16777215: largest 24-bit color value
    color_int = int(biome_id / MAX_BIOME_ID * MAX_RGB)
    color_bytes = color_int.to_bytes(3, byteorder="big")
    return np.array([color_bytes[0], color_bytes[1], color_bytes[2]], dtype=np.uint8)
def extract_region_biomes(region: Region) -> np.ndarray:
    """
    Extract the full region's biome definition into a region-shaped array.

    :param region: Region whose chunks carry parsed NBT data (or are empty).
    :return: np.ndarray: the biome ids as a (512, 512) uint8 array indexed [z, x].
    """
    # Allocate the target shape directly instead of zeros(total).reshape(...).
    region_biomes = np.zeros(
        (REGION_WIDTH_BLOCKS, REGION_WIDTH_BLOCKS), dtype=np.uint8
    )
    for index, chunk in enumerate(region.chunks):
        # Empty chunks contribute nothing; skip before computing bounds.
        if chunk.empty:
            continue
        # Remember: z increments and wraps around before x increments!
        z_start, z_end, x_start, x_end = calculate_chunk_bounds(index)
        chunk_biome = chunk.nbt_data["Level"]["Biomes"].reshape(
            CHUNK_WIDTH_BLOCKS, CHUNK_WIDTH_BLOCKS
        )
        region_biomes[z_start:z_end, x_start:x_end] = chunk_biome
    return region_biomes
import click
from mcatools.biome import extract_region_biomes
from mcatools.fileutils import save_biome_image_rgb, extract_region, readfile
@click.group()
def main():
    """Root click command group for the mcatools CLI; subcommands attach below."""
    pass
@click.command()
@click.argument("fromregion")
@click.argument("toimage")
def export_biome(fromregion, toimage):
    """Export a region file's biome data as an RGB image.

    FROMREGION: path to the ``.mca`` region file to read.
    TOIMAGE: path for the rendered image (format inferred from the extension).
    """
    click.echo(f"Loading region data from: {fromregion}")
    filedata = readfile(fromregion)
    region = extract_region(filedata)
    biome_data = extract_region_biomes(region)
    save_biome_image_rgb(biome_data, toimage)
    click.echo(f"Biome data saved to: {toimage}")


# Register the subcommand with the CLI group.
main.add_command(export_biome)
from enum import Enum
# World-geometry constants: a chunk is 16x16 blocks (horizontally) and a
# region is 32x32 chunks, i.e. 512x512 blocks.
CHUNK_WIDTH_BLOCKS = 16
CHUNK_TOTAL_BLOCKS = 256  # 16 * 16
REGION_WIDTH_CHUNKS = 32
REGION_TOTAL_CHUNKS = 1024  # 32 * 32
REGION_WIDTH_BLOCKS = 512  # 16 * 32
REGION_TOTAL_BLOCKS = 262_144  # 512 * 512
class BiomeType(Enum):
    """Numeric Minecraft biome ids.

    Values 0-50 are base biomes, 127 is "the void", and 129+ are variant
    biomes (generally base id + 128, with gaps where no variant exists).
    """

    # Base biomes.
    ocean = 0
    plains = 1
    desert = 2
    mountains = 3
    forest = 4
    taiga = 5
    swamp = 6
    river = 7
    nether = 8
    the_end = 9
    frozen_ocean = 10
    frozen_river = 11
    snowy_tundra = 12
    snowy_mountains = 13
    mushroom_fields = 14
    mushroom_field_shore = 15
    beach = 16
    desert_hills = 17
    wooded_hills = 18
    taiga_hills = 19
    mountain_edge = 20
    jungle = 21
    jungle_hills = 22
    jungle_edge = 23
    deep_ocean = 24
    stone_shore = 25
    snowy_beach = 26
    birch_forest = 27
    birch_forest_hills = 28
    dark_forest = 29
    snowy_taiga = 30
    snowy_taiga_hills = 31
    giant_tree_taiga = 32
    giant_tree_taiga_hills = 33
    wooded_mountains = 34
    savanna = 35
    savanna_plateau = 36
    badlands = 37
    wooded_badlands_plateau = 38
    badlands_plateau = 39
    small_end_islands = 40
    end_midlands = 41
    end_highlands = 42
    end_barrens = 43
    warm_ocean = 44
    lukewarm_ocean = 45
    cold_ocean = 46
    deep_warm_ocean = 47
    deep_lukewarm_ocean = 48
    deep_cold_ocean = 49
    deep_frozen_ocean = 50
    the_void = 127
    # Variant biomes (note the gaps in the numbering).
    sunflower_plains = 129
    desert_lakes = 130
    gravelly_mountains = 131
    flower_forest = 132
    taiga_mountains = 133
    swamp_hills = 134
    ice_spikes = 140
    modified_jungle = 149
    modified_jungle_edge = 151
    tall_birch_forest = 155
    tall_birch_hills = 156
    dark_forest_hills = 157
    snowy_taiga_mountains = 158
    giant_spruce_taiga = 160
    giant_spruce_taiga_hills = 161
    modified_gravelly_mountains = 162
    shattered_savanna = 163
    shattered_savanna_plateau = 164
    eroded_badlands = 165
    modified_wooded_badlands_plateau = 166
    modified_badlands_plateau = 167
    bamboo_jungle = 168
    bamboo_jungle_hills = 169
import functools
import io
import itertools
import logging
import zlib
import nbtlib
import numpy as np
import tqdm
from PIL import Image
from mcatools.region import Region, Chunk
from mcatools.definitions import REGION_WIDTH_CHUNKS, REGION_TOTAL_CHUNKS
from mcatools.biome import biome_id_to_rgb
def save_biome_image_rgb(biome_data: np.ndarray, filename: str):
    """Render biome ids as an RGB image and save it to ``filename``.

    Computes each color once per *distinct* biome id via an ``np.unique``
    look-up table, instead of calling ``biome_id_to_rgb`` once per block
    (262,144 Python-level calls for a full region).

    :param biome_data: 2-d array of biome ids, shape (height, width).
    :param filename: destination path; image format inferred from extension.
    """
    unique_ids, inverse = np.unique(biome_data, return_inverse=True)
    # Palette: one RGB triple per distinct biome id.
    palette = np.zeros((unique_ids.size, 3), dtype=np.uint8)
    for i, biome_id in tqdm.tqdm(
        enumerate(unique_ids), desc="coloring biome", total=unique_ids.size
    ):
        palette[i] = biome_id_to_rgb(biome_id)
    # Broadcast the palette back over every block position.
    colors = palette[inverse].reshape(biome_data.shape[0], biome_data.shape[1], 3)
    im = Image.fromarray(colors, "RGB")
    im.save(filename)
def save_biome_image_mono(biome_data: np.ndarray, filename: str):
    """Save biome ids as a brightness-normalized grayscale image.

    :param biome_data: 2-d array of biome ids.
    :param filename: destination path; image format inferred from extension.
    """
    peak = biome_data.max()
    if peak == 0:
        # All-zero data: avoid division by zero; the image is all black anyway.
        brighter = biome_data.astype(np.uint8)
    else:
        # BUG FIX: was "* 256", which made the maximum value wrap to 0 under
        # np.uint8 (np.uint8(256) == 0), rendering the brightest biome black.
        brighter = biome_data / peak * 255
    im = Image.fromarray(np.uint8(brighter))
    im.save(filename)
def readfile(filename):
    """Read a file from disk and return its raw bytes.

    :param filename: path of the file to read.
    :return: bytes: the complete file contents.
    """
    with open(filename, "rb") as infile:
        return infile.read()
def extract_nbt_data(chunk_data: bytes) -> nbtlib.tag.Compound:
    """
    Extract, decompress, and parse NBT data from raw chunk data.

    Chunk layout: 4-byte big-endian payload length, 1-byte compression type,
    then the compressed payload.  The length counts the compression byte.

    :param chunk_data: the full raw bytes from disk for one whole chunk
    :return: nbtlib.tag.Compound: the NBT data's root node
    """
    length = int_from_bytes(chunk_data[0:4])
    # BUG FIX: was chunk_data[4:1], an empty slice that always decoded to 0,
    # so the "unsupported compression" message fired for every chunk.
    compression_type = int_from_bytes(chunk_data[4:5])
    if compression_type != 2:
        # Type 2 is zlib; anything else is unsupported by this decoder.
        logging.info(
            f"Unsupported compression type {compression_type}; expect the unexpected"
        )
    data_start = 5
    data_end = data_start + length - 1  # length includes the compression byte
    compressed_data = chunk_data[data_start:data_end]
    raw_nbt_data = zlib.decompress(compressed_data)
    # BytesIO can be constructed directly from the bytes; no write/seek dance.
    nbt_data = nbtlib.File.parse(io.BytesIO(raw_nbt_data)).root
    return nbt_data
def extract_region(data: bytes) -> Region:
    """
    Extract chunks from a raw `mca` region file.

    :param data: bytes of the complete region file
    :return: Region with populated Chunk objects that represent the region file's data
    """
    chunks = []
    for x, z in tqdm.tqdm(
        itertools.product(range(REGION_WIDTH_CHUNKS), range(REGION_WIDTH_CHUNKS)),
        desc="extracting region data",
        total=REGION_TOTAL_CHUNKS,
    ):
        # Each chunk has a 4-byte entry in the location table at the start of
        # the file: 3 bytes of sector offset followed by 1 byte of sector count.
        location_data_start = 4 * (
            (x % REGION_WIDTH_CHUNKS) + (z % REGION_WIDTH_CHUNKS) * REGION_WIDTH_CHUNKS
        )
        location_data_end = location_data_start + 4
        location_data = data[location_data_start:location_data_end]
        offset = int_from_bytes(location_data[0:3])  # offset in 4096-byte sectors
        sector_count = location_data[3]
        # The timestamp table mirrors the location table, 4096 bytes later.
        timestamp_data_start = location_data_start + 4096
        timestamp_data_end = timestamp_data_start + 4
        timestamp = int_from_bytes(data[timestamp_data_start:timestamp_data_end])
        chunk = Chunk(x, z, offset, sector_count, timestamp)
        if not chunk.empty:
            # Chunk payloads occupy whole 4096-byte sectors starting at `offset`.
            chunk_data_start = offset * 4096
            chunk_data_end = chunk_data_start + sector_count * 4096
            chunk_data = data[chunk_data_start:chunk_data_end]
            nbt_data = extract_nbt_data(chunk_data)
            chunk.nbt_data = nbt_data
        chunks.append(chunk)
    region = Region(chunks)
    return region
int_from_bytes = functools.partial(int.from_bytes, byteorder="big")
import datetime
import math
import nbtlib
from mcatools.definitions import (
CHUNK_WIDTH_BLOCKS,
REGION_WIDTH_CHUNKS,
REGION_TOTAL_CHUNKS,
)
def calculate_chunk_bounds(index):
    """
    Calculate a chunk's location bounds in the flattened blocks.

    Chunks are stored z-major: z increments and wraps before x increments.

    :param index: flat chunk index within the region (0..REGION_TOTAL_CHUNKS-1).
    :return: tuple of (z_start, z_end, x_start, x_end) block coordinates.
    """
    # divmod replaces math.floor(index / width) + modulo — identical results
    # for the non-negative indexes used here, in one idiomatic call.
    x_chunk, z_chunk = divmod(index, REGION_WIDTH_CHUNKS)
    z_start = z_chunk * CHUNK_WIDTH_BLOCKS
    z_end = z_start + CHUNK_WIDTH_BLOCKS
    x_start = x_chunk * CHUNK_WIDTH_BLOCKS
    x_end = x_start + CHUNK_WIDTH_BLOCKS
    return z_start, z_end, x_start, x_end
class Region(object):
    """Collection of 1024 Chunk objects."""

    def __init__(self, chunks: list):
        """
        :param chunks: exactly REGION_TOTAL_CHUNKS Chunk objects, z-major order.
        :raises ValueError: if the chunk count is wrong.
        """
        if len(chunks) != REGION_TOTAL_CHUNKS:
            # ValueError signals bad input more precisely than bare Exception;
            # being a subclass, any existing `except Exception` still catches it.
            raise ValueError(
                f"Region needs { REGION_TOTAL_CHUNKS } chunks, not {len(chunks)}."
            )
        self.chunks = chunks
class Chunk(object):
    """Everything you need to know about a 16x256x16 chunk."""

    def __init__(
        self,
        x: int,
        z: int,
        offset: int,
        sector_count: int,
        timestamp: int,
        nbt_data: nbtlib.tag.Compound = None,
    ):
        """
        :param x: chunk x coordinate within the region.
        :param z: chunk z coordinate within the region.
        :param offset: first 4096-byte sector of the chunk's data in the file.
        :param sector_count: number of 4096-byte sectors the data occupies.
        :param timestamp: last-modified time as a Unix timestamp.
        :param nbt_data: parsed NBT root node, or None if not yet loaded.
        """
        self.x = x
        self.z = z
        self.offset = offset
        self.sector_count = sector_count
        self.timestamp = timestamp
        self.nbt_data = nbt_data

    @property
    def empty(self) -> bool:
        """True when the region file stores no data for this chunk."""
        return self.offset == 0 and self.sector_count == 0

    @property
    def datetime(self) -> datetime.datetime:
        """The chunk's last-modified time as a datetime (naive, local time)."""
        return datetime.datetime.fromtimestamp(self.timestamp)

    def __repr__(self):
        summary = dict(
            x=self.x, z=self.z, empty=self.empty, datetime=str(self.datetime)
        )
        return f"<{self.__class__.__name__} {summary}>"
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment