From b1d7c7a3848adbfe06737907958a523f68fd662e Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Thu, 23 May 2024 19:15:10 -0700 Subject: [PATCH 1/6] WIP Run Length Encoding for Levels --- pkg/level/chunk.go | 36 +++---- pkg/level/chunk_map.go | 14 ++- pkg/level/chunk_rle.go | 201 +++++++++++++++++++++++++++++++++++ pkg/level/chunk_test.go | 78 ++++++++++---- pkg/level/chunker.go | 26 +++++ pkg/level/chunker_zipfile.go | 8 +- 6 files changed, 311 insertions(+), 52 deletions(-) create mode 100644 pkg/level/chunk_rle.go diff --git a/pkg/level/chunk.go b/pkg/level/chunk.go index 85e2cb7..fbee1c8 100644 --- a/pkg/level/chunk.go +++ b/pkg/level/chunk.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/binary" "encoding/json" + "errors" "fmt" "image" "math" @@ -19,6 +20,7 @@ import ( // Types of chunks. const ( MapType uint64 = iota + RLEType GridType ) @@ -53,6 +55,7 @@ type JSONChunk struct { // Accessor provides a high-level API to interact with absolute pixel coordinates // while abstracting away the details of how they're stored. type Accessor interface { + SetChunkCoordinate(render.Point, uint8) Inflate(*Palette) error Iter() <-chan Pixel IterViewport(viewport render.Rect) <-chan Pixel @@ -62,15 +65,13 @@ type Accessor interface { Len() int MarshalBinary() ([]byte, error) UnmarshalBinary([]byte) error - MarshalJSON() ([]byte, error) - UnmarshalJSON([]byte) error } // NewChunk creates a new chunk. func NewChunk() *Chunk { return &Chunk{ - Type: MapType, - Accessor: NewMapAccessor(), + Type: RLEType, + Accessor: NewRLEAccessor(), } } @@ -330,23 +331,6 @@ func (c *Chunk) Usage(size int) float64 { return float64(c.Len()) / float64(size) } -// MarshalJSON writes the chunk to JSON. -// -// DEPRECATED: MarshalBinary will encode chunks to a tighter binary format. -func (c *Chunk) MarshalJSON() ([]byte, error) { - data, err := c.Accessor.MarshalJSON() - if err != nil { - return []byte{}, err - } - - generic := &JSONChunk{ - Type: c.Type, - Data: data, - } - b, err := json.Marshal(generic) - return b, err -} - // UnmarshalJSON loads the chunk from JSON and uses the correct accessor to // parse the inner details. // @@ -363,7 +347,10 @@ func (c *Chunk) UnmarshalJSON(b []byte) error { switch c.Type { case MapType: c.Accessor = NewMapAccessor() - return c.Accessor.UnmarshalJSON(generic.Data) + if unmarshaler, ok := c.Accessor.(json.Unmarshaler); ok { + return unmarshaler.UnmarshalJSON(generic.Data) + } + return errors.New("Chunk.UnmarshalJSON: this chunk doesn't support JSON unmarshaling") default: return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type) } @@ -407,6 +394,11 @@ func (c *Chunk) UnmarshalBinary(b []byte) error { switch chunkType { case MapType: c.Accessor = NewMapAccessor() + c.Accessor.SetChunkCoordinate(c.Point, c.Size) + return c.Accessor.UnmarshalBinary(reader.Bytes()) + case RLEType: + c.Accessor = NewRLEAccessor() + c.Accessor.SetChunkCoordinate(c.Point, c.Size) return c.Accessor.UnmarshalBinary(reader.Bytes()) default: return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type) diff --git a/pkg/level/chunk_map.go b/pkg/level/chunk_map.go index 652b50c..ebbd839 100644 --- a/pkg/level/chunk_map.go +++ b/pkg/level/chunk_map.go @@ -16,8 +16,10 @@ import ( // MapAccessor implements a chunk accessor by using a map of points to their // palette indexes. This is the simplest accessor and is best for sparse chunks. 
type MapAccessor struct { - grid map[render.Point]*Swatch - mu sync.RWMutex + coord render.Point `json:"-"` // chunk coordinate, assigned by Chunker + size uint8 `json:"-"` // chunk size, assigned by Chunker + grid map[render.Point]*Swatch + mu sync.RWMutex } // NewMapAccessor initializes a MapAccessor. @@ -27,6 +29,12 @@ func NewMapAccessor() *MapAccessor { } } +// SetChunkCoordinate receives our chunk's coordinate from the Chunker. +func (a *MapAccessor) SetChunkCoordinate(p render.Point, size uint8) { + a.coord = p + a.size = size +} + // Inflate the sparse swatches from their palette indexes. func (a *MapAccessor) Inflate(pal *Palette) error { for point, swatch := range a.grid { @@ -271,7 +279,7 @@ func (a *MapAccessor) UnmarshalBinary(compressed []byte) error { defer a.mu.Unlock() // New format: decompress the byte stream. - //log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) + log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) var reader = bytes.NewBuffer(compressed) diff --git a/pkg/level/chunk_rle.go b/pkg/level/chunk_rle.go new file mode 100644 index 0000000..bf0ee68 --- /dev/null +++ b/pkg/level/chunk_rle.go @@ -0,0 +1,201 @@ +package level + +import ( + "bytes" + "encoding/binary" + "errors" + + "git.kirsle.net/SketchyMaze/doodle/pkg/log" + "git.kirsle.net/go/render" +) + +// RLEAccessor implements a chunk accessor which stores its on-disk format using +// Run Length Encoding (RLE), but in memory behaves equivalently to the MapAccessor. +type RLEAccessor struct { + acc *MapAccessor +} + +// NewRLEAccessor initializes a RLEAccessor. +func NewRLEAccessor() *RLEAccessor { + return &RLEAccessor{ + acc: NewMapAccessor(), + } +} + +// SetChunkCoordinate receives our chunk's coordinate from the Chunker. +func (a *RLEAccessor) SetChunkCoordinate(p render.Point, size uint8) { + a.acc.coord = p + a.acc.size = size +} + +// Inflate the sparse swatches from their palette indexes. +func (a *RLEAccessor) Inflate(pal *Palette) error { + return a.acc.Inflate(pal) +} + +// Len returns the current size of the map, or number of pixels registered. +func (a *RLEAccessor) Len() int { + return a.acc.Len() +} + +// IterViewport returns a channel to loop over pixels in the viewport. +func (a *RLEAccessor) IterViewport(viewport render.Rect) <-chan Pixel { + return a.acc.IterViewport(viewport) +} + +// Iter returns a channel to loop over all points in this chunk. +func (a *RLEAccessor) Iter() <-chan Pixel { + return a.acc.Iter() +} + +// Get a pixel from the map. +func (a *RLEAccessor) Get(p render.Point) (*Swatch, error) { + return a.acc.Get(p) +} + +// Set a pixel on the map. +func (a *RLEAccessor) Set(p render.Point, sw *Swatch) error { + return a.acc.Set(p, sw) +} + +// Delete a pixel from the map. +func (a *RLEAccessor) Delete(p render.Point) error { + return a.acc.Delete(p) +} + +// Make2DChunkGrid creates a 2D map of uint64 pointers matching the square dimensions of the given size. +// +// It is used by the RLEAccessor to flatten a chunk into a grid for run-length encoding. +func Make2DChunkGrid(size int) ([][]*uint64, error) { + // Sanity check if the chunk was properly initialized. + if size == 0 { + return nil, errors.New("chunk not initialized correctly with its size and coordinate") + } + + var grid = make([][]*uint64, size) + for i := 0; i < size; i++ { + grid[i] = make([]*uint64, size) + } + + return grid, nil +} + +/* +MarshalBinary converts the chunk data to a binary representation. 
+ +This accessor uses Run Length Encoding (RLE) in its binary format. Starting +with the top-left pixel of this chunk, the binary format is a stream of bytes +formatted as such: + +- UVarint for the palette index number (0-255), with 0xFF meaning void +- UVarint for the length of repetition of that palette index +*/ +func (a *RLEAccessor) MarshalBinary() ([]byte, error) { + // Flatten the chunk out into a full 2D array of all its points. + var ( + size = int(a.acc.size) + grid, err = Make2DChunkGrid(size) + ) + if err != nil { + return nil, err + } + + // Populate the dense 2D array of its pixels. + for px := range a.Iter() { + var ( + point = render.NewPoint(px.X, px.Y) + relative = RelativeCoordinate(point, a.acc.coord, a.acc.size) + ptr = uint64(px.PaletteIndex) + ) + grid[relative.Y][relative.X] = &ptr + } + + // log.Error("2D GRID:\n%+v", grid) + + // Run-length encode the grid. + var ( + compressed []byte + firstColor = true + lastColor uint64 + runLength uint64 + ) + for _, row := range grid { + for _, color := range row { + var index uint64 + if color == nil { + index = 0xFF + } + + if firstColor { + lastColor = index + runLength = 1 + firstColor = false + continue + } + + if index != lastColor { + compressed = binary.AppendUvarint(compressed, index) + compressed = binary.AppendUvarint(compressed, runLength) + lastColor = index + runLength = 1 + continue + } + + runLength++ + } + } + + log.Error("RLE compressed: %v", compressed) + + return compressed, nil +} + +// UnmarshalBinary will decode a compressed RLEAccessor byte stream. +func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { + a.acc.mu.Lock() + defer a.acc.mu.Unlock() + + // New format: decompress the byte stream. + log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) + + // Prepare the 2D grid to decompress the RLE stream into. 
+ var ( + size = int(a.acc.size) + _, err = Make2DChunkGrid(size) + x, y, cursor int + ) + if err != nil { + return err + } + + var reader = bytes.NewBuffer(compressed) + + for { + var ( + paletteIndex, err1 = binary.ReadUvarint(reader) + repeatCount, err2 = binary.ReadUvarint(reader) + ) + + if err1 != nil || err2 != nil { + log.Error("reading Uvarints from compressed data: {%s, %s}", err1, err2) + break + } + + for i := uint64(0); i < repeatCount; i++ { + cursor++ + if cursor%size == 0 { + y++ + x = 0 + } else { + x++ + } + + point := render.NewPoint(int(x), int(y)) + if paletteIndex != 0xFF { + a.acc.grid[point] = NewSparseSwatch(int(paletteIndex)) + } + } + } + + return nil +} diff --git a/pkg/level/chunk_test.go b/pkg/level/chunk_test.go index 43c4300..e9c614f 100644 --- a/pkg/level/chunk_test.go +++ b/pkg/level/chunk_test.go @@ -242,54 +242,88 @@ func TestChunkCoordinates(t *testing.T) { c := level.NewChunker(128) type testCase struct { - In render.Point - Expect render.Point + WorldCoordinate render.Point + ChunkCoordinate render.Point + RelativeCoordinate render.Point } tests := []testCase{ testCase{ - In: render.NewPoint(0, 0), - Expect: render.NewPoint(0, 0), + WorldCoordinate: render.NewPoint(0, 0), + ChunkCoordinate: render.NewPoint(0, 0), + RelativeCoordinate: render.NewPoint(0, 0), }, testCase{ - In: render.NewPoint(128, 128), - Expect: render.NewPoint(0, 0), + WorldCoordinate: render.NewPoint(4, 8), + ChunkCoordinate: render.NewPoint(0, 0), + RelativeCoordinate: render.NewPoint(4, 8), }, testCase{ - In: render.NewPoint(1024, 128), - Expect: render.NewPoint(1, 0), + WorldCoordinate: render.NewPoint(128, 128), + ChunkCoordinate: render.NewPoint(1, 1), + RelativeCoordinate: render.NewPoint(0, 0), }, testCase{ - In: render.NewPoint(3600, 1228), - Expect: render.NewPoint(3, 1), + WorldCoordinate: render.NewPoint(130, 156), + ChunkCoordinate: render.NewPoint(1, 1), + RelativeCoordinate: render.NewPoint(2, 28), }, testCase{ - In: render.NewPoint(-100, -1), - Expect: render.NewPoint(-1, -1), + WorldCoordinate: render.NewPoint(1024, 128), + ChunkCoordinate: render.NewPoint(8, 1), + RelativeCoordinate: render.NewPoint(0, 0), }, testCase{ - In: render.NewPoint(-950, 100), - Expect: render.NewPoint(-1, 0), + WorldCoordinate: render.NewPoint(3600, 1228), + ChunkCoordinate: render.NewPoint(28, 9), + RelativeCoordinate: render.NewPoint(16, 76), }, testCase{ - In: render.NewPoint(-1001, -856), - Expect: render.NewPoint(-2, -1), + WorldCoordinate: render.NewPoint(-100, -1), + ChunkCoordinate: render.NewPoint(-1, -1), + RelativeCoordinate: render.NewPoint(28, 127), }, testCase{ - In: render.NewPoint(-3600, -4800), - Expect: render.NewPoint(-4, -5), + WorldCoordinate: render.NewPoint(-950, 100), + ChunkCoordinate: render.NewPoint(-8, 0), + RelativeCoordinate: render.NewPoint(74, 100), + }, + testCase{ + WorldCoordinate: render.NewPoint(-1001, -856), + ChunkCoordinate: render.NewPoint(-8, -7), + RelativeCoordinate: render.NewPoint(23, 40), + }, + testCase{ + WorldCoordinate: render.NewPoint(-3600, -4800), + ChunkCoordinate: render.NewPoint(-29, -38), + RelativeCoordinate: render.NewPoint(112, 64), }, } for _, test := range tests { - actual := c.ChunkCoordinate(test.In) - if actual != test.Expect { + // Test conversion from world to chunk coordinate. 
+ actual := c.ChunkCoordinate(test.WorldCoordinate) + if actual != test.ChunkCoordinate { t.Errorf( "Failed ChunkCoordinate conversion:\n"+ " Input: %s\n"+ "Expected: %s\n"+ " Got: %s", - test.In, - test.Expect, + test.WorldCoordinate, + test.ChunkCoordinate, + actual, + ) + } + + // Test the relative (inside-chunk) coordinate. + actual = level.RelativeCoordinate(test.WorldCoordinate, actual, c.Size) + if actual != test.RelativeCoordinate { + t.Errorf( + "Failed RelativeCoordinate conversion:\n"+ + " Input: %s\n"+ + "Expected: %s\n"+ + " Got: %s", + test.WorldCoordinate, + test.RelativeCoordinate, actual, ) } diff --git a/pkg/level/chunker.go b/pkg/level/chunker.go index e1c00c8..72e5763 100644 --- a/pkg/level/chunker.go +++ b/pkg/level/chunker.go @@ -74,6 +74,7 @@ func (c *Chunker) Inflate(pal *Palette) error { for coord, chunk := range c.Chunks { chunk.Point = coord chunk.Size = c.Size + chunk.SetChunkCoordinate(chunk.Point, chunk.Size) chunk.Inflate(pal) } return nil @@ -445,6 +446,7 @@ func (c *Chunker) FreeCaches() int { // This function should be the singular writer to the chunk cache. func (c *Chunker) SetChunk(p render.Point, chunk *Chunk) { c.chunkMu.Lock() + chunk.SetChunkCoordinate(p, chunk.Size) c.Chunks[p] = chunk c.chunkMu.Unlock() @@ -605,6 +607,30 @@ func (c *Chunker) ChunkCoordinate(abs render.Point) render.Point { ) } +// RelativeCoordinate will translate from an absolute world coordinate, into one that +// is relative to fit inside of the chunk with the given chunk coordinate and size. +// +// Example: +// +// - With 128x128 chunks and a world coordinate of (280,-600) +// - The ChunkCoordinate would be (2,-4) which encompasses (256,-512) to (383,-639) +// - And relative inside that chunk, the pixel is at (24,) +func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uint8) render.Point { + // Pixel coordinate offset. + var ( + size = int(chunkSize) + offset = render.Point{ + X: chunkCoord.X * size, + Y: chunkCoord.Y * size, + } + ) + + return render.Point{ + X: abs.X - offset.X, + Y: abs.Y - offset.Y, + } +} + // ChunkMap maps a chunk coordinate to its chunk data. type ChunkMap map[render.Point]*Chunk diff --git a/pkg/level/chunker_zipfile.go b/pkg/level/chunker_zipfile.go index 53c1810..8eb04d6 100644 --- a/pkg/level/chunker_zipfile.go +++ b/pkg/level/chunker_zipfile.go @@ -2,6 +2,7 @@ package level import ( "archive/zip" + "errors" "fmt" "io/ioutil" "regexp" @@ -190,11 +191,7 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error { data = bytes } } else { - if json, err := c.MarshalJSON(); err != nil { - return err - } else { - data = json - } + return errors.New("Chunk.ToZipfile: JSON chunk format no longer supported for writing") } // Write the file contents to zip whether binary or json. @@ -226,6 +223,7 @@ func ChunkFromZipfile(zf *zip.Reader, layer int, coord render.Point) (*Chunk, er err = chunk.UnmarshalBinary(bin) if err != nil { + log.Error("ChunkFromZipfile(%s): %s", coord, err) return nil, err } } else if file, err := zf.Open(jsonfile); err == nil { -- 2.30.2 From 5654145fd8bdd706df4c851f3676542b8e0f5113 Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Thu, 23 May 2024 23:02:01 -0700 Subject: [PATCH 2/6] (Experimental) Run Length Encoding for Levels Finally add a second option for Chunk MapAccessor implementation besides the MapAccessor. The RLEAccessor is basically a MapAccessor that will compress your drawing with Run Length Encoding (RLE) in the on-disk format in the ZIP file. 
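For illustration, here is a minimal standalone Go sketch of the Uvarint-pair
stream described under "Implementation details" below. It is not code from this
patch: the palette numbers and run lengths are invented, and 0xFFFF stands in
for the null color.

    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
    )

    const nullColor = 0xFFFF // sentinel meaning "no pixel here"

    func main() {
        // Three example runs: 4 pixels of palette #2, 3 void pixels, 5 of palette #7.
        runs := [][2]uint64{{2, 4}, {nullColor, 3}, {7, 5}}

        // Encode: each run becomes two Uvarints (palette index, repeat count).
        var stream []byte
        for _, r := range runs {
            stream = binary.AppendUvarint(stream, r[0])
            stream = binary.AppendUvarint(stream, r[1])
        }

        // Decode: read pairs back until the stream is exhausted.
        reader := bytes.NewBuffer(stream)
        for {
            index, err1 := binary.ReadUvarint(reader)
            count, err2 := binary.ReadUvarint(reader)
            if err1 != nil || err2 != nil {
                break
            }
            fmt.Printf("palette %#x repeats for %d pixels\n", index, count)
        }
    }

Because a Uvarint stores small values in a single byte, a long run of one color
collapses to just a couple of bytes, which is where the file size savings below
come from.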
This slashes the file sizes of most levels: * Shapeshifter: 21.8 MB -> 8.1 MB * Jungle: 10.4 MB -> 4.1 MB * Zoo: 2.8 MB -> 1.3 MB Implementation details: * The RLE binary format for Chunks is a stream of Uvarint pairs storing the palette index number and the number of pixels to repeat it (along the Y,X axis of the chunk). * Null colors are represented by a Uvarint that decodes to 0xFFFF or 65535 in decimal. * Gameplay logic currently limits maps to 256 colors. * The default for newly created chunks in-game will be RLE by default. * Its in-memory representation is still a MapAccessor (a map of absolute world coordinates to palette index). * The game can still open and play legacy MapAccessor maps. * On save in the editor, the game will upgrade/convert MapAccessor chunks over to RLEAccessors, improving on your level's file size with a simple re-save. Current Bugs * On every re-save to RLE, one pixel is lost in the bottom-right corner of each chunk. Each subsequent re-save loses one more pixel to the left, so what starts as a single pixel per chunk slowly evolves into a horizontal line. * Some pixels smear vertically as well. * Off-by-negative-one errors when some chunks Iter() their pixels but compute a relative coordinate of (-1,0)! Some mismatch between the stored world coords of a pixel inside the chunk vs. the chunk's assigned coordinate by the Chunker: certain combinations of chunk coord/abs coord. To Do * The `doodad touch` command should re-save existing levels to upgrade them. --- cmd/doodle/main.go | 7 ++ pkg/balance/feature_flags.go | 6 ++ pkg/level/chunk.go | 24 +++-- pkg/level/chunk_map.go | 15 ++- pkg/level/chunk_rle.go | 129 ++++++++++-------------- pkg/level/chunk_test.go | 6 +- pkg/level/chunker.go | 34 ++++++- pkg/level/chunker_migrate.go | 67 +++++++++++++ pkg/level/chunker_test.go | 95 ++++++++++++++++++ pkg/level/chunker_zipfile.go | 10 +- pkg/level/fmt_maintenance.go | 15 +++ pkg/level/fmt_readwrite.go | 7 +- pkg/level/rle/rle.go | 189 +++++++++++++++++++++++++++++++++++ pkg/level/rle/rle_test.go | 43 ++++++++ pkg/uix/canvas_editable.go | 2 +- 15 files changed, 542 insertions(+), 107 deletions(-) create mode 100644 pkg/level/chunker_migrate.go create mode 100644 pkg/level/rle/rle.go create mode 100644 pkg/level/rle/rle_test.go diff --git a/cmd/doodle/main.go b/cmd/doodle/main.go index ce4df43..6509fa4 100644 --- a/cmd/doodle/main.go +++ b/cmd/doodle/main.go @@ -101,6 +101,11 @@ func main() { Name: "chdir", Usage: "working directory for the game's runtime package", }, + &cli.BoolFlag{ + Name: "new", + Aliases: []string{"n"}, + Usage: "open immediately to the level editor", + }, &cli.BoolFlag{ Name: "edit", Aliases: []string{"e"}, @@ -248,6 +253,8 @@ func main() { if c.Bool("guitest") { game.Goto(&doodle.GUITestScene{}) + } else if c.Bool("new") { + game.NewMap() } else if filename != "" { if c.Bool("edit") { game.EditFile(filename) diff --git a/pkg/balance/feature_flags.go b/pkg/balance/feature_flags.go index a109dc0..bd23394 100644 --- a/pkg/balance/feature_flags.go +++ b/pkg/balance/feature_flags.go @@ -28,6 +28,12 @@ const ( // If you set both flags to false, level zipfiles will use the classic // json chunk format as before on save. BinaryChunkerEnabled = true + + // Enable "v3" Run-Length Encoding for level chunker. + // + // This only supports Zipfile levels and will use the ".bin" format + // enabled by the previous setting. + RLEBinaryChunkerEnabled = true ) // Feature Flags to turn on/off experimental content. 
diff --git a/pkg/level/chunk.go b/pkg/level/chunk.go index fbee1c8..f4bc28a 100644 --- a/pkg/level/chunk.go +++ b/pkg/level/chunk.go @@ -24,6 +24,9 @@ const ( GridType ) +// Default chunk type for newly created chunks (was MapType). +const DefaultChunkType = RLEType + // Chunk holds a single portion of the pixel canvas. type Chunk struct { Type uint64 // map vs. 2D array. @@ -55,7 +58,6 @@ type JSONChunk struct { // Accessor provides a high-level API to interact with absolute pixel coordinates // while abstracting away the details of how they're stored. type Accessor interface { - SetChunkCoordinate(render.Point, uint8) Inflate(*Palette) error Iter() <-chan Pixel IterViewport(viewport render.Rect) <-chan Pixel @@ -69,10 +71,11 @@ type Accessor interface { // NewChunk creates a new chunk. func NewChunk() *Chunk { - return &Chunk{ - Type: RLEType, - Accessor: NewRLEAccessor(), + var c = &Chunk{ + Type: RLEType, } + c.Accessor = NewRLEAccessor(c) + return c } // Texture will return a cached texture for the rendering engine for this @@ -335,6 +338,9 @@ func (c *Chunk) Usage(size int) float64 { // parse the inner details. // // DEPRECATED in favor of binary marshalling. +// +// Only supports MapAccessor chunk types, which was the only one supported +// before this function was deprecated. func (c *Chunk) UnmarshalJSON(b []byte) error { // Parse it generically so we can hand off the inner "data" object to the // right accessor for unmarshalling. @@ -346,7 +352,7 @@ func (c *Chunk) UnmarshalJSON(b []byte) error { switch c.Type { case MapType: - c.Accessor = NewMapAccessor() + c.Accessor = NewMapAccessor(c) if unmarshaler, ok := c.Accessor.(json.Unmarshaler); ok { return unmarshaler.UnmarshalJSON(generic.Data) } @@ -393,12 +399,12 @@ func (c *Chunk) UnmarshalBinary(b []byte) error { // Decode the rest of the byte stream. switch chunkType { case MapType: - c.Accessor = NewMapAccessor() - c.Accessor.SetChunkCoordinate(c.Point, c.Size) + c.Type = MapType + c.Accessor = NewMapAccessor(c) return c.Accessor.UnmarshalBinary(reader.Bytes()) case RLEType: - c.Accessor = NewRLEAccessor() - c.Accessor.SetChunkCoordinate(c.Point, c.Size) + c.Type = RLEType + c.Accessor = NewRLEAccessor(c) return c.Accessor.UnmarshalBinary(reader.Bytes()) default: return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type) diff --git a/pkg/level/chunk_map.go b/pkg/level/chunk_map.go index ebbd839..f5b5f3c 100644 --- a/pkg/level/chunk_map.go +++ b/pkg/level/chunk_map.go @@ -16,23 +16,22 @@ import ( // MapAccessor implements a chunk accessor by using a map of points to their // palette indexes. This is the simplest accessor and is best for sparse chunks. type MapAccessor struct { - coord render.Point `json:"-"` // chunk coordinate, assigned by Chunker - size uint8 `json:"-"` // chunk size, assigned by Chunker + chunk *Chunk // Pointer to parent struct, for its Size and Point grid map[render.Point]*Swatch mu sync.RWMutex } // NewMapAccessor initializes a MapAccessor. -func NewMapAccessor() *MapAccessor { +func NewMapAccessor(chunk *Chunk) *MapAccessor { return &MapAccessor{ - grid: map[render.Point]*Swatch{}, + chunk: chunk, + grid: map[render.Point]*Swatch{}, } } -// SetChunkCoordinate receives our chunk's coordinate from the Chunker. -func (a *MapAccessor) SetChunkCoordinate(p render.Point, size uint8) { - a.coord = p - a.size = size +// Reset the MapAccessor. +func (a *MapAccessor) Reset() { + a.grid = map[render.Point]*Swatch{} } // Inflate the sparse swatches from their palette indexes. 
diff --git a/pkg/level/chunk_rle.go b/pkg/level/chunk_rle.go index bf0ee68..09c8920 100644 --- a/pkg/level/chunk_rle.go +++ b/pkg/level/chunk_rle.go @@ -1,10 +1,7 @@ package level import ( - "bytes" - "encoding/binary" - "errors" - + "git.kirsle.net/SketchyMaze/doodle/pkg/level/rle" "git.kirsle.net/SketchyMaze/doodle/pkg/log" "git.kirsle.net/go/render" ) @@ -12,22 +9,18 @@ import ( // RLEAccessor implements a chunk accessor which stores its on-disk format using // Run Length Encoding (RLE), but in memory behaves equivalently to the MapAccessor. type RLEAccessor struct { - acc *MapAccessor + chunk *Chunk // parent Chunk, for its Size and Point + acc *MapAccessor } // NewRLEAccessor initializes a RLEAccessor. -func NewRLEAccessor() *RLEAccessor { +func NewRLEAccessor(chunk *Chunk) *RLEAccessor { return &RLEAccessor{ - acc: NewMapAccessor(), + chunk: chunk, + acc: NewMapAccessor(chunk), } } -// SetChunkCoordinate receives our chunk's coordinate from the Chunker. -func (a *RLEAccessor) SetChunkCoordinate(p render.Point, size uint8) { - a.acc.coord = p - a.acc.size = size -} - // Inflate the sparse swatches from their palette indexes. func (a *RLEAccessor) Inflate(pal *Palette) error { return a.acc.Inflate(pal) @@ -63,23 +56,6 @@ func (a *RLEAccessor) Delete(p render.Point) error { return a.acc.Delete(p) } -// Make2DChunkGrid creates a 2D map of uint64 pointers matching the square dimensions of the given size. -// -// It is used by the RLEAccessor to flatten a chunk into a grid for run-length encoding. -func Make2DChunkGrid(size int) ([][]*uint64, error) { - // Sanity check if the chunk was properly initialized. - if size == 0 { - return nil, errors.New("chunk not initialized correctly with its size and coordinate") - } - - var grid = make([][]*uint64, size) - for i := 0; i < size; i++ { - grid[i] = make([]*uint64, size) - } - - return grid, nil -} - /* MarshalBinary converts the chunk data to a binary representation. @@ -93,8 +69,8 @@ formatted as such: func (a *RLEAccessor) MarshalBinary() ([]byte, error) { // Flatten the chunk out into a full 2D array of all its points. var ( - size = int(a.acc.size) - grid, err = Make2DChunkGrid(size) + size = int(a.chunk.Size) + grid, err = rle.NewGrid(size) ) if err != nil { return nil, err @@ -104,50 +80,18 @@ func (a *RLEAccessor) MarshalBinary() ([]byte, error) { for px := range a.Iter() { var ( point = render.NewPoint(px.X, px.Y) - relative = RelativeCoordinate(point, a.acc.coord, a.acc.size) - ptr = uint64(px.PaletteIndex) + relative = RelativeCoordinate(point, a.chunk.Point, a.chunk.Size) + ptr = uint64(px.Swatch.Index()) ) + + // TODO: sometimes we get a -1 value in X or Y, not sure why. + if relative.X < 0 || relative.Y < 0 { + continue + } grid[relative.Y][relative.X] = &ptr } - // log.Error("2D GRID:\n%+v", grid) - - // Run-length encode the grid. - var ( - compressed []byte - firstColor = true - lastColor uint64 - runLength uint64 - ) - for _, row := range grid { - for _, color := range row { - var index uint64 - if color == nil { - index = 0xFF - } - - if firstColor { - lastColor = index - runLength = 1 - firstColor = false - continue - } - - if index != lastColor { - compressed = binary.AppendUvarint(compressed, index) - compressed = binary.AppendUvarint(compressed, runLength) - lastColor = index - runLength = 1 - continue - } - - runLength++ - } - } - - log.Error("RLE compressed: %v", compressed) - - return compressed, nil + return grid.Compress() } // UnmarshalBinary will decode a compressed RLEAccessor byte stream. 
@@ -158,10 +102,39 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { // New format: decompress the byte stream. log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) - // Prepare the 2D grid to decompress the RLE stream into. + grid, err := rle.NewGrid(int(a.chunk.Size)) + if err != nil { + return err + } + + if err := grid.Decompress(compressed); err != nil { + return err + } + + // Load the grid into our MapAccessor. + a.acc.Reset() + for y, row := range grid { + for x, col := range row { + if col == nil { + continue + } + + // TODO: x-1 to avoid the level creeping to the right every save, + // not sure on the root cause! RLEAccessor Decompress? + abs := FromRelativeCoordinate(render.NewPoint(x, y), a.chunk.Point, a.chunk.Size) + abs.X -= 1 + a.acc.grid[abs] = NewSparseSwatch(int(*col)) + } + } + + return nil +} + +/* +// Prepare the 2D grid to decompress the RLE stream into. var ( - size = int(a.acc.size) - _, err = Make2DChunkGrid(size) + size = int(a.chunk.Size) + _, err = rle.NewGrid(size) x, y, cursor int ) if err != nil { @@ -181,6 +154,8 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { break } + log.Warn("RLE index %d for %dpx", paletteIndex, repeatCount) + for i := uint64(0); i < repeatCount; i++ { cursor++ if cursor%size == 0 { @@ -196,6 +171,4 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { } } } - - return nil -} +*/ diff --git a/pkg/level/chunk_test.go b/pkg/level/chunk_test.go index e9c614f..06bfa34 100644 --- a/pkg/level/chunk_test.go +++ b/pkg/level/chunk_test.go @@ -129,8 +129,10 @@ func TestChunker(t *testing.T) { // Test the map chunk accessor. func TestMapAccessor(t *testing.T) { - a := level.NewMapAccessor() - _ = a + var ( + c = level.NewChunk() + a = level.NewMapAccessor(c) + ) // Test action types var ( diff --git a/pkg/level/chunker.go b/pkg/level/chunker.go index 72e5763..134366a 100644 --- a/pkg/level/chunker.go +++ b/pkg/level/chunker.go @@ -74,7 +74,6 @@ func (c *Chunker) Inflate(pal *Palette) error { for coord, chunk := range c.Chunks { chunk.Point = coord chunk.Size = c.Size - chunk.SetChunkCoordinate(chunk.Point, chunk.Size) chunk.Inflate(pal) } return nil @@ -326,7 +325,7 @@ func (c *Chunker) GetChunk(p render.Point) (*Chunk, bool) { // Hit the zipfile for it. if c.Zipfile != nil { - if chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, p); err == nil { + if chunk, err := c.ChunkFromZipfile(p); err == nil { // log.Debug("GetChunk(%s) cache miss, read from zip", p) c.SetChunk(p, chunk) // cache it c.logChunkAccess(p, chunk) // for the LRU cache @@ -446,7 +445,6 @@ func (c *Chunker) FreeCaches() int { // This function should be the singular writer to the chunk cache. func (c *Chunker) SetChunk(p render.Point, chunk *Chunk) { c.chunkMu.Lock() - chunk.SetChunkCoordinate(p, chunk.Size) c.Chunks[p] = chunk c.chunkMu.Unlock() @@ -617,6 +615,32 @@ func (c *Chunker) ChunkCoordinate(abs render.Point) render.Point { // - And relative inside that chunk, the pixel is at (24,) func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uint8) render.Point { // Pixel coordinate offset. + var ( + size = int(chunkSize) + offset = render.Point{ + X: chunkCoord.X * size, + Y: chunkCoord.Y * size, + } + point = render.Point{ + X: abs.X - offset.X, + Y: abs.Y - offset.Y, + } + ) + + if point.X < 0 || point.Y < 0 { + log.Error("RelativeCoordinate: X < 0! 
abs=%s rel=%s chunk=%s size=%d", abs, point, chunkCoord, chunkSize) + log.Error("RelativeCoordinate(2): size=%d offset=%s point=%s", size, offset, point) + } + + return point +} + +// FromRelativeCoordinate is the inverse of RelativeCoordinate. +// +// With a chunk size of 128 and a relative coordinate like (8, 12), +// this function will return the absolute world coordinates based +// on your chunk.Point's placement in the level. +func FromRelativeCoordinate(rel render.Point, chunkCoord render.Point, chunkSize uint8) render.Point { var ( size = int(chunkSize) offset = render.Point{ @@ -626,8 +650,8 @@ func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uin ) return render.Point{ - X: abs.X - offset.X, - Y: abs.Y - offset.Y, + X: rel.X + offset.X, + Y: rel.Y + offset.Y, } } diff --git a/pkg/level/chunker_migrate.go b/pkg/level/chunker_migrate.go new file mode 100644 index 0000000..7b007cc --- /dev/null +++ b/pkg/level/chunker_migrate.go @@ -0,0 +1,67 @@ +package level + +import ( + "runtime" + "sync" + + "git.kirsle.net/SketchyMaze/doodle/pkg/balance" + "git.kirsle.net/SketchyMaze/doodle/pkg/log" +) + +/* Functions to migrate Chunkers between different implementations. */ + +// OptimizeChunkerAccessors will evaluate all of the chunks of your drawing +// and possibly migrate them to a different Accessor implementation when +// saving on disk. +func (c *Chunker) OptimizeChunkerAccessors() { + c.chunkMu.Lock() + defer c.chunkMu.Unlock() + + log.Info("Optimizing Chunker Accessors") + + // TODO: parallelize this with goroutines + var ( + chunks = make(chan *Chunk, len(c.Chunks)) + wg sync.WaitGroup + ) + + for range runtime.NumCPU() { + wg.Add(1) + go func() { + defer wg.Done() + for chunk := range chunks { + var point = chunk.Point + log.Warn("Chunk %s is a: %d", point, chunk.Type) + + // Upgrade all MapTypes into RLE compressed MapTypes? + if balance.RLEBinaryChunkerEnabled { + if chunk.Type == MapType { + log.Info("Optimizing chunk %s accessor from Map to RLE", point) + ma, _ := chunk.Accessor.(*MapAccessor) + rle := NewRLEAccessor(chunk).FromMapAccessor(ma) + + c.Chunks[point].Type = RLEType + c.Chunks[point].Accessor = rle + } + } + } + }() + } + + // Feed it the chunks. + for _, chunk := range c.Chunks { + chunks <- chunk + } + + close(chunks) + wg.Wait() + +} + +// FromMapAccessor migrates from a MapAccessor to RLE. 
+func (a *RLEAccessor) FromMapAccessor(ma *MapAccessor) *RLEAccessor { + return &RLEAccessor{ + chunk: a.chunk, + acc: ma, + } +} diff --git a/pkg/level/chunker_test.go b/pkg/level/chunker_test.go index dff40fd..76885fa 100644 --- a/pkg/level/chunker_test.go +++ b/pkg/level/chunker_test.go @@ -228,3 +228,98 @@ func TestViewportChunks(t *testing.T) { } } } + +func TestRelativeCoordinates(t *testing.T) { + + var ( + chunker = level.NewChunker(128) + ) + + type TestCase struct { + WorldCoord render.Point + ChunkCoord render.Point + ExpectRelative render.Point + } + var tests = []TestCase{ + { + WorldCoord: render.NewPoint(4, 8), + ExpectRelative: render.NewPoint(4, 8), + }, + { + WorldCoord: render.NewPoint(128, 128), + ExpectRelative: render.NewPoint(0, 0), + }, + { + WorldCoord: render.NewPoint(143, 144), + ExpectRelative: render.NewPoint(15, 16), + }, + { + WorldCoord: render.NewPoint(-105, -86), + ExpectRelative: render.NewPoint(23, 42), + }, + { + WorldCoord: render.NewPoint(-252, 264), + ExpectRelative: render.NewPoint(4, 8), + }, + + // These were seen breaking actual levels, at the corners of the chunk + { + WorldCoord: render.NewPoint(511, 256), + ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game + }, + { + WorldCoord: render.NewPoint(511, 512), + ChunkCoord: render.NewPoint(4, 4), + ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game + }, + { + WorldCoord: render.NewPoint(127, 384), + ChunkCoord: render.NewPoint(1, 3), + ExpectRelative: render.NewPoint(-1, 0), + }, + } + for i, test := range tests { + var ( + chunkCoord = test.ChunkCoord + actualRelative = level.RelativeCoordinate( + test.WorldCoord, + chunkCoord, + chunker.Size, + ) + roundTrip = level.FromRelativeCoordinate( + actualRelative, + chunkCoord, + chunker.Size, + ) + ) + + // compute expected chunk coord automatically? + if chunkCoord == render.Origin { + chunkCoord = chunker.ChunkCoordinate(test.WorldCoord) + } + + if actualRelative != test.ExpectRelative { + t.Errorf("Test %d: world coord %s in chunk %s\n"+ + "Expected RelativeCoordinate() to be: %s\n"+ + "But it was: %s", + i, + test.WorldCoord, + chunkCoord, + test.ExpectRelative, + actualRelative, + ) + } + + if roundTrip != test.WorldCoord { + t.Errorf("Test %d: world coord %s in chunk %s\n"+ + "Did not survive round trip! Expected: %s\n"+ + "But it was: %s", + i, + test.WorldCoord, + chunkCoord, + test.WorldCoord, + roundTrip, + ) + } + } +} diff --git a/pkg/level/chunker_zipfile.go b/pkg/level/chunker_zipfile.go index 8eb04d6..7ca4ced 100644 --- a/pkg/level/chunker_zipfile.go +++ b/pkg/level/chunker_zipfile.go @@ -93,7 +93,7 @@ func (c *Chunker) MigrateZipfile(zf *zip.Writer) error { } // Verify that this chunk file in the old ZIP was not empty. - chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, point) + chunk, err := c.ChunkFromZipfile(point) if err == nil && chunk.Len() == 0 { log.Debug("Skip chunk %s (old zipfile chunk was empty)", coord) continue @@ -205,14 +205,20 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error { } // ChunkFromZipfile loads a chunk from a zipfile. -func ChunkFromZipfile(zf *zip.Reader, layer int, coord render.Point) (*Chunk, error) { +func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) { // File names? 
var ( + zf = c.Zipfile + layer = c.Layer + binfile = fmt.Sprintf("chunks/%d/%s.bin", layer, coord) jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord) chunk = NewChunk() ) + chunk.Point = coord + chunk.Size = c.Size + // Read from the new binary format. if file, err := zf.Open(binfile); err == nil { // log.Debug("Reading binary compressed chunk from %s", binfile) diff --git a/pkg/level/fmt_maintenance.go b/pkg/level/fmt_maintenance.go index e7a2268..7e45198 100644 --- a/pkg/level/fmt_maintenance.go +++ b/pkg/level/fmt_maintenance.go @@ -4,6 +4,21 @@ import "git.kirsle.net/SketchyMaze/doodle/pkg/log" // Maintenance functions for the file format on disk. +// Vacuum runs any maintenance or migration tasks for the level at time of save. +// +// It will prune broken links between actors, or migrate internal data structures +// to optimize storage on disk of its binary data. +func (m *Level) Vacuum() error { + if links := m.PruneLinks(); links > 0 { + log.Debug("Vacuum: removed %d broken links between actors in this level.") + } + + // Let the Chunker optimize accessor types. + m.Chunker.OptimizeChunkerAccessors() + + return nil +} + // PruneLinks cleans up any Actor Links that can not be resolved in the // level data. For example, if actors were linked in Edit Mode and one // actor is deleted leaving a broken link. diff --git a/pkg/level/fmt_readwrite.go b/pkg/level/fmt_readwrite.go index 4981f92..2833631 100644 --- a/pkg/level/fmt_readwrite.go +++ b/pkg/level/fmt_readwrite.go @@ -3,6 +3,7 @@ package level import ( "fmt" "io/ioutil" + "os" "runtime" "strings" @@ -96,7 +97,9 @@ func (m *Level) WriteFile(filename string) error { m.GameVersion = branding.Version // Maintenance functions, clean up cruft before save. - m.PruneLinks() + if err := m.Vacuum(); err != nil { + log.Error("Vacuum level %s: %s", filename, err) + } bin, err := m.ToJSON() if err != nil { @@ -115,7 +118,7 @@ func (m *Level) WriteFile(filename string) error { } // Desktop: write to disk. - err = ioutil.WriteFile(filename, bin, 0644) + err = os.WriteFile(filename, bin, 0644) if err != nil { return fmt.Errorf("level.WriteFile: %s", err) } diff --git a/pkg/level/rle/rle.go b/pkg/level/rle/rle.go new file mode 100644 index 0000000..2f44875 --- /dev/null +++ b/pkg/level/rle/rle.go @@ -0,0 +1,189 @@ +// Package rle contains support for Run-Length Encoding of level chunks. +package rle + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "strings" + + "git.kirsle.net/SketchyMaze/doodle/pkg/log" + "git.kirsle.net/go/render" +) + +const NullColor = 0xFFFF + +// Grid is a 2D array of nullable integers to store a flat bitmap of a chunk. +type Grid [][]*uint64 + +// NewGrid will return an initialized 2D grid of equal dimensions of the given size. +// +// The grid is indexed in [Y][X] notation, or: by row first and then column. +func NewGrid(size int) (Grid, error) { + if size == 0 { + return nil, errors.New("no size given for RLE Grid: the chunker was probably not initialized") + } + + var grid = make([][]*uint64, size+1) + for i := 0; i < size+1; i++ { + grid[i] = make([]*uint64, size+1) + } + + return grid, nil +} + +func MustGrid(size int) Grid { + grid, err := NewGrid(size) + if err != nil { + panic(err) + } + return grid +} + +type Pixel struct { + Point render.Point + Palette int +} + +// Size of the grid. +func (g Grid) Size() int { + return len(g[0]) +} + +// Compress the grid into a byte stream of RLE compressed data. 
+// +// The compressed format is a stream of: +// +// - A Uvarint for the palette index (0-255) or 0xffff (65535) for null. +// - A Uvarint for how many pixels to repeat that color. +func (g Grid) Compress() ([]byte, error) { + log.Error("BEGIN Compress()") + // log.Warn("Visualized:\n%s", g.Visualize()) + + // Run-length encode the grid. + var ( + compressed []byte // final result + lastColor uint64 // last color seen (current streak) + runLength uint64 // current streak for the last color + buffering bool // detect end of grid + + // Flush the buffer + flush = func() { + // log.Info("flush: %d for %d length", lastColor, runLength) + compressed = binary.AppendUvarint(compressed, lastColor) + compressed = binary.AppendUvarint(compressed, runLength) + } + ) + + for y, row := range g { + for x, nullableIndex := range row { + var index uint64 + if nullableIndex == nil { + index = NullColor + } else { + index = *nullableIndex + } + + // First color of the grid + if y == 0 && x == 0 { + // log.Info("First color @ %dx%d is %d", x, y, index) + lastColor = index + runLength = 1 + continue + } + + // Buffer it until we get a change of color or EOF. + if index != lastColor { + // log.Info("Color %d streaks for %d until %dx%d", lastColor, runLength, x, y) + flush() + lastColor = index + runLength = 1 + buffering = false + continue + } + + buffering = true + runLength++ + } + } + + // Flush the final buffer when we got to EOF on the grid. + if buffering { + flush() + } + + // log.Error("RLE compressed: %v", compressed) + + return compressed, nil +} + +// Decompress the RLE byte stream back into a populated 2D grid. +func (g Grid) Decompress(compressed []byte) error { + log.Error("BEGIN Decompress()") + // log.Warn("Visualized:\n%s", g.Visualize()) + + // Prepare the 2D grid to decompress the RLE stream into. + var ( + size = g.Size() + x, y, cursor int + ) + + var reader = bytes.NewBuffer(compressed) + + for { + var ( + paletteIndexRaw, err1 = binary.ReadUvarint(reader) + repeatCount, err2 = binary.ReadUvarint(reader) + ) + + if err1 != nil || err2 != nil { + break + } + + // Handle the null color. + var paletteIndex *uint64 + if paletteIndexRaw != NullColor { + paletteIndex = &paletteIndexRaw + } + + // log.Warn("RLE index %v for %dpx", paletteIndexRaw, repeatCount) + + for i := uint64(0); i < repeatCount; i++ { + cursor++ + if cursor%size == 0 { + y++ + x = 0 + } + + point := render.NewPoint(int(x), int(y)) + if point.Y >= size || point.X >= size { + continue + } + g[point.Y][point.X] = paletteIndex + + x++ + } + } + + // log.Warn("Visualized:\n%s", g.Visualize()) + + return nil +} + +// Visualize the state of the 2D grid. +func (g Grid) Visualize() string { + var lines []string + for _, row := range g { + var line = "[" + for _, col := range row { + if col == nil { + line += " " + } else { + line += fmt.Sprintf("%x", *col) + } + } + lines = append(lines, line+"]") + } + return strings.Join(lines, "\n") +} diff --git a/pkg/level/rle/rle_test.go b/pkg/level/rle/rle_test.go new file mode 100644 index 0000000..858d2ea --- /dev/null +++ b/pkg/level/rle/rle_test.go @@ -0,0 +1,43 @@ +package rle_test + +import ( + "testing" + + "git.kirsle.net/SketchyMaze/doodle/pkg/level/rle" +) + +func TestRLE(t *testing.T) { + + // Test a completely filled grid. + var ( + grid = rle.MustGrid(128) + color = uint64(5) + ) + for y := range grid { + for x := range y { + grid[y][x] = &color + } + } + + // Compress and decompress it. 
+ var ( + compressed, _ = grid.Compress() + grid2 = rle.MustGrid(128) + ) + grid2.Decompress(compressed) + + // Ensure our color is set everywhere. + for y := range grid { + for x := range y { + if grid[y][x] != &color { + t.Errorf("RLE compression didn't survive the round trip: %d,%d didn't save\n"+ + " Expected: %d\n"+ + " Actually: %v", + x, y, + color, + grid[y][x], + ) + } + } + } +} diff --git a/pkg/uix/canvas_editable.go b/pkg/uix/canvas_editable.go index 90977e2..9ef096f 100644 --- a/pkg/uix/canvas_editable.go +++ b/pkg/uix/canvas_editable.go @@ -420,7 +420,7 @@ func (w *Canvas) loopEditable(ev *event.State) error { baseColor, err := chunker.Get(cursor) if err != nil { limit = balance.FloodToolVoidLimit - log.Warn("FloodTool: couldn't get base color at %s: %s (got %s)", cursor, err, baseColor.Color) + log.Warn("FloodTool: couldn't get base color at %s: %s (got %+v)", cursor, err, baseColor) } // If no change, do nothing. -- 2.30.2 From 4851730ccf6b449f22bec478d92b4d60cd5e4eb1 Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Fri, 24 May 2024 13:54:41 -0700 Subject: [PATCH 3/6] Fix RLE Encoding Off-by-One Errors [PTO] Levels can now be converted to RLE encoded chunk accessors and be re-saved continuously without any loss of information. Off-by-one errors resolved: * The rle.NewGrid() was adding a +1 everywhere making the 2D grids have 129 elements to a side for a 128 chunk size. * In rle.Decompress() the cursor value and translation to X,Y coordinates is fixed to avoid a pixel going missing at the end of the first row (128,0) * The abs.X-- hack in UnmarshalBinary is no longer needed to prevent the chunks from scooting a pixel to the right on every save. Doodad tool updates: * Remove unused CLI flags in `doodad resave` (actors, chunks, script, attachment, verbose) and add a `--output` flag to save to a different file name to the original. * Update `doodad show` to allow debugging of RLE compressed chunks: * CLI flag `--chunk=1,2` to specify a single chunk coordinate to debug * CLI flag `--visualize-rle` will Visualize() RLE compressed chunks in their 2D grid form in your terminal window (VERY noisy for large levels! Use the --chunk option to narrow to one chunk). Bug fixes and misc changes: * Chunk.Usage() to return a better percentage of chunk utilization. * Chunker.ChunkFromZipfile() was split out into two functions: * RawChunkFromZipfile retrieves the raw bytes of the chunk as well as the file extension discovered (.bin or .json) so the caller can interpret the bytes correctly. * ChunkFromZipfile calls the former function and then depending on file extension, unmarshals from binary or json. * The Raw function enables the `doodad show` command to debug and visualize the raw contents of the RLE compressed chunks. * Updated the Visualize() function for the RLE encoder: instead of converting palette indexes to hex (0-F) which would begin causing problems for palette indexes above 16 (as they would use two+ characters), indexes are mapped to a wider range of symbols (0-9A-Z) and roll over if you have more than 36 colors on your level. This at least keeps the Visualize() grid an easy to read 128x128 characters in your terminal. 
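As a standalone illustration of the indexing fix (this toy program is not code
from the patch), the fixed Decompress() loop works out to the plain row-major
mapping below: the coordinate is derived from the cursor before the cursor
advances, so the last pixel of a row can no longer go missing.

    package main

    import "fmt"

    func main() {
        const size = 4 // toy chunk size; real chunks are 128x128
        for cursor := 0; cursor < size*size; cursor++ {
            // Derive the coordinate first, then advance the cursor.
            x, y := cursor%size, cursor/size
            fmt.Printf("cursor=%2d -> (%d,%d)\n", cursor, x, y)
        }
    }

Advancing the cursor before deriving the coordinate, as the pre-fix loop did,
shifts every run over by one pixel, which lines up with the abs.X-- workaround
and the per-save pixel loss described in the previous commit.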
--- cmd/doodad/commands/resave.go | 41 ++++++++++---------- cmd/doodad/commands/show.go | 57 +++++++++++++++++++++++++++- pkg/level/chunk.go | 5 ++- pkg/level/chunk_rle.go | 26 ++++++------- pkg/level/chunker_migrate.go | 10 ++--- pkg/level/chunker_zipfile.go | 71 +++++++++++++++++++++-------------- pkg/level/rle/rle.go | 42 +++++++++++++-------- 7 files changed, 167 insertions(+), 85 deletions(-) diff --git a/cmd/doodad/commands/resave.go b/cmd/doodad/commands/resave.go index 75f6837..495b162 100644 --- a/cmd/doodad/commands/resave.go +++ b/cmd/doodad/commands/resave.go @@ -21,27 +21,10 @@ func init() { Usage: "load and re-save a level or doodad file to migrate to newer file format versions", ArgsUsage: "<.level or .doodad>", Flags: []cli.Flag{ - &cli.BoolFlag{ - Name: "actors", - Usage: "print verbose actor data in Level files", - }, - &cli.BoolFlag{ - Name: "chunks", - Usage: "print verbose data about all the pixel chunks in a file", - }, - &cli.BoolFlag{ - Name: "script", - Usage: "print the script from a doodad file and exit", - }, &cli.StringFlag{ - Name: "attachment", - Aliases: []string{"a"}, - Usage: "print the contents of the attached filename to terminal", - }, - &cli.BoolFlag{ - Name: "verbose", - Aliases: []string{"v"}, - Usage: "print verbose output (all verbose flags enabled)", + Name: "output", + Aliases: []string{"o"}, + Usage: "write to a different file than the input", }, }, Action: func(c *cli.Context) error { @@ -84,6 +67,18 @@ func resaveLevel(c *cli.Context, filename string) error { log.Info("Loaded level from file: %s", filename) log.Info("Last saved game version: %s", lvl.GameVersion) + // Different output filename? + if output := c.String("output"); output != "" { + log.Info("Output will be saved to: %s", output) + filename = output + } + + if err := lvl.Vacuum(); err != nil { + log.Error("Vacuum error: %s", err) + } else { + log.Info("Run vacuum on level file.") + } + log.Info("Saving back to disk") if err := lvl.WriteJSON(filename); err != nil { return fmt.Errorf("couldn't write %s: %s", filename, err) @@ -100,6 +95,12 @@ func resaveDoodad(c *cli.Context, filename string) error { log.Info("Loaded doodad from file: %s", filename) log.Info("Last saved game version: %s", dd.GameVersion) + // Different output filename? 
+ if output := c.String("output"); output != "" { + log.Info("Output will be saved to: %s", output) + filename = output + } + log.Info("Saving back to disk") if err := dd.WriteJSON(filename); err != nil { return fmt.Errorf("couldn't write %s: %s", filename, err) diff --git a/cmd/doodad/commands/show.go b/cmd/doodad/commands/show.go index e43839c..9093607 100644 --- a/cmd/doodad/commands/show.go +++ b/cmd/doodad/commands/show.go @@ -1,6 +1,8 @@ package commands import ( + "bytes" + "encoding/binary" "fmt" "path/filepath" "sort" @@ -9,6 +11,7 @@ import ( "git.kirsle.net/SketchyMaze/doodle/pkg/doodads" "git.kirsle.net/SketchyMaze/doodle/pkg/enum" "git.kirsle.net/SketchyMaze/doodle/pkg/level" + "git.kirsle.net/SketchyMaze/doodle/pkg/level/rle" "git.kirsle.net/SketchyMaze/doodle/pkg/log" "github.com/urfave/cli/v2" ) @@ -44,6 +47,14 @@ func init() { Aliases: []string{"v"}, Usage: "print verbose output (all verbose flags enabled)", }, + &cli.BoolFlag{ + Name: "visualize-rle", + Usage: "visually dump RLE encoded chunks to the terminal (VERY noisy for large drawings!)", + }, + &cli.StringFlag{ + Name: "chunk", + Usage: "specific chunk coordinate; when debugging chunks, only show this chunk (example: 2,-1)", + }, }, Action: func(c *cli.Context) error { if c.NArg() < 1 { @@ -263,6 +274,10 @@ func showChunker(c *cli.Context, ch *level.Chunker) { chunkSize = int(ch.Size) width = worldSize.W - worldSize.X height = worldSize.H - worldSize.Y + + // Chunk debugging CLI options. + visualize = c.Bool("visualize-rle") + specificChunk = c.String("chunk") ) fmt.Println("Chunks:") fmt.Printf(" Pixels Per Chunk: %d^2\n", ch.Size) @@ -278,7 +293,18 @@ func showChunker(c *cli.Context, ch *level.Chunker) { // Verbose chunk information. if c.Bool("chunks") || c.Bool("verbose") { fmt.Println(" Chunk Details:") - for point, chunk := range ch.Chunks { + for point := range ch.IterChunks() { + // Debugging specific chunk coordinate? + if specificChunk != "" && point.String() != specificChunk { + log.Warn("Skip chunk %s: not the specific chunk you're looking for", point) + continue + } + + chunk, ok := ch.GetChunk(point) + if !ok { + continue + } + fmt.Printf(" - Coord: %s\n", point) fmt.Printf(" Type: %s\n", chunkTypeToName(chunk.Type)) fmt.Printf(" Range: (%d,%d) ... (%d,%d)\n", @@ -287,6 +313,33 @@ func showChunker(c *cli.Context, ch *level.Chunker) { (int(point.X)*chunkSize)+chunkSize, (int(point.Y)*chunkSize)+chunkSize, ) + fmt.Printf(" Usage: %f (%d len of %d)\n", chunk.Usage(), chunk.Len(), chunkSize*chunkSize) + + // Visualize the RLE encoded chunks? 
+ if visualize && chunk.Type == level.RLEType { + ext, bin, err := ch.RawChunkFromZipfile(point) + if err != nil { + log.Error(err.Error()) + continue + } else if ext != ".bin" { + log.Error("Unexpected filetype for RLE compressed chunk (expected .bin, got %s)", ext) + continue + } + + // Read off the first byte (chunk type) + var reader = bytes.NewBuffer(bin) + binary.ReadUvarint(reader) + bin = reader.Bytes() + + grid, err := rle.NewGrid(chunkSize) + if err != nil { + log.Error(err.Error()) + continue + } + + grid.Decompress(bin) + fmt.Println(grid.Visualize()) + } } } else { fmt.Println(" Use -chunks or -verbose to serialize Chunks") @@ -298,6 +351,8 @@ func chunkTypeToName(v uint64) string { switch v { case level.MapType: return "map" + case level.RLEType: + return "rle map" case level.GridType: return "grid" default: diff --git a/pkg/level/chunk.go b/pkg/level/chunk.go index f4bc28a..80d5d53 100644 --- a/pkg/level/chunk.go +++ b/pkg/level/chunk.go @@ -330,8 +330,9 @@ func (c *Chunk) SizePositive() render.Rect { } // Usage returns the percent of free space vs. allocated pixels in the chunk. -func (c *Chunk) Usage(size int) float64 { - return float64(c.Len()) / float64(size) +func (c *Chunk) Usage() float64 { + size := float64(c.Size) + return float64(c.Len()) / (size * size) } // UnmarshalJSON loads the chunk from JSON and uses the correct accessor to diff --git a/pkg/level/chunk_rle.go b/pkg/level/chunk_rle.go index 09c8920..0744552 100644 --- a/pkg/level/chunk_rle.go +++ b/pkg/level/chunk_rle.go @@ -77,18 +77,21 @@ func (a *RLEAccessor) MarshalBinary() ([]byte, error) { } // Populate the dense 2D array of its pixels. - for px := range a.Iter() { - var ( - point = render.NewPoint(px.X, px.Y) - relative = RelativeCoordinate(point, a.chunk.Point, a.chunk.Size) - ptr = uint64(px.Swatch.Index()) - ) + for y, row := range grid { + for x := range row { + var ( + relative = render.NewPoint(x, y) + absolute = FromRelativeCoordinate(relative, a.chunk.Point, a.chunk.Size) + swatch, err = a.Get(absolute) + ) - // TODO: sometimes we get a -1 value in X or Y, not sure why. - if relative.X < 0 || relative.Y < 0 { - continue + if err != nil { + continue + } + + var ptr = uint64(swatch.Index()) + grid[relative.Y][relative.X] = &ptr } - grid[relative.Y][relative.X] = &ptr } return grid.Compress() @@ -119,10 +122,7 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { continue } - // TODO: x-1 to avoid the level creeping to the right every save, - // not sure on the root cause! RLEAccessor Decompress? abs := FromRelativeCoordinate(render.NewPoint(x, y), a.chunk.Point, a.chunk.Size) - abs.X -= 1 a.acc.grid[abs] = NewSparseSwatch(int(*col)) } } diff --git a/pkg/level/chunker_migrate.go b/pkg/level/chunker_migrate.go index 7b007cc..5b70fe2 100644 --- a/pkg/level/chunker_migrate.go +++ b/pkg/level/chunker_migrate.go @@ -14,9 +14,6 @@ import ( // and possibly migrate them to a different Accessor implementation when // saving on disk. func (c *Chunker) OptimizeChunkerAccessors() { - c.chunkMu.Lock() - defer c.chunkMu.Unlock() - log.Info("Optimizing Chunker Accessors") // TODO: parallelize this with goroutines @@ -31,7 +28,6 @@ func (c *Chunker) OptimizeChunkerAccessors() { defer wg.Done() for chunk := range chunks { var point = chunk.Point - log.Warn("Chunk %s is a: %d", point, chunk.Type) // Upgrade all MapTypes into RLE compressed MapTypes? if balance.RLEBinaryChunkerEnabled { @@ -49,7 +45,11 @@ func (c *Chunker) OptimizeChunkerAccessors() { } // Feed it the chunks. 
- for _, chunk := range c.Chunks { + for point := range c.IterChunks() { + chunk, ok := c.GetChunk(point) + if !ok { + continue + } chunks <- chunk } diff --git a/pkg/level/chunker_zipfile.go b/pkg/level/chunker_zipfile.go index 7ca4ced..7413515 100644 --- a/pkg/level/chunker_zipfile.go +++ b/pkg/level/chunker_zipfile.go @@ -4,7 +4,7 @@ import ( "archive/zip" "errors" "fmt" - "io/ioutil" + "io" "regexp" "strconv" @@ -206,6 +206,42 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error { // ChunkFromZipfile loads a chunk from a zipfile. func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) { + // Grab the chunk (bin or json) from the Zipfile. + ext, bin, err := c.RawChunkFromZipfile(coord) + if err != nil { + return nil, err + } + + var chunk = NewChunk() + chunk.Point = coord + chunk.Size = c.Size + + switch ext { + case ".bin": + // New style .bin compressed format: + // Either a MapAccessor compressed bin, or RLE compressed. + err = chunk.UnmarshalBinary(bin) + if err != nil { + log.Error("ChunkFromZipfile(%s): %s", coord, err) + return nil, err + } + case ".json": + // Legacy style plain .json file (MapAccessor only). + err = chunk.UnmarshalJSON(bin) + if err != nil { + return nil, err + } + default: + return nil, fmt.Errorf("unexpected filetype found for this chunk: %s", ext) + } + + return chunk, nil +} + +// RawChunkFromZipfile loads a chunk from a zipfile and returns its raw binary content. +// +// Returns the file extension (".bin" or ".json"), raw bytes, and an error. +func (c *Chunker) RawChunkFromZipfile(coord render.Point) (string, []byte, error) { // File names? var ( zf = c.Zipfile @@ -213,41 +249,18 @@ func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) { binfile = fmt.Sprintf("chunks/%d/%s.bin", layer, coord) jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord) - chunk = NewChunk() ) - chunk.Point = coord - chunk.Size = c.Size - // Read from the new binary format. if file, err := zf.Open(binfile); err == nil { - // log.Debug("Reading binary compressed chunk from %s", binfile) - bin, err := ioutil.ReadAll(file) - if err != nil { - return nil, err - } - - err = chunk.UnmarshalBinary(bin) - if err != nil { - log.Error("ChunkFromZipfile(%s): %s", coord, err) - return nil, err - } + data, err := io.ReadAll(file) + return ".bin", data, err } else if file, err := zf.Open(jsonfile); err == nil { - // log.Debug("Reading JSON encoded chunk from %s", jsonfile) - bin, err := ioutil.ReadAll(file) - if err != nil { - return nil, err - } - - err = chunk.UnmarshalJSON(bin) - if err != nil { - return nil, err - } - } else { - return nil, err + data, err := io.ReadAll(file) + return ".json", data, err } - return chunk, nil + return "", nil, errors.New("not found in zipfile") } // ChunksInZipfile returns the list of chunk coordinates in a zipfile. 
diff --git a/pkg/level/rle/rle.go b/pkg/level/rle/rle.go index 2f44875..dfd2230 100644 --- a/pkg/level/rle/rle.go +++ b/pkg/level/rle/rle.go @@ -5,10 +5,8 @@ import ( "bytes" "encoding/binary" "errors" - "fmt" "strings" - "git.kirsle.net/SketchyMaze/doodle/pkg/log" "git.kirsle.net/go/render" ) @@ -25,9 +23,9 @@ func NewGrid(size int) (Grid, error) { return nil, errors.New("no size given for RLE Grid: the chunker was probably not initialized") } - var grid = make([][]*uint64, size+1) - for i := 0; i < size+1; i++ { - grid[i] = make([]*uint64, size+1) + var grid = make([][]*uint64, size) + for i := 0; i < size; i++ { + grid[i] = make([]*uint64, size) } return grid, nil @@ -58,7 +56,7 @@ func (g Grid) Size() int { // - A Uvarint for the palette index (0-255) or 0xffff (65535) for null. // - A Uvarint for how many pixels to repeat that color. func (g Grid) Compress() ([]byte, error) { - log.Error("BEGIN Compress()") + // log.Error("BEGIN Compress()") // log.Warn("Visualized:\n%s", g.Visualize()) // Run-length encode the grid. @@ -120,13 +118,14 @@ func (g Grid) Compress() ([]byte, error) { // Decompress the RLE byte stream back into a populated 2D grid. func (g Grid) Decompress(compressed []byte) error { - log.Error("BEGIN Decompress()") + // log.Error("BEGIN Decompress() Length of stream: %d", len(compressed)) // log.Warn("Visualized:\n%s", g.Visualize()) // Prepare the 2D grid to decompress the RLE stream into. var ( - size = g.Size() - x, y, cursor int + size = g.Size() + x, y = -1, -1 + cursor int ) var reader = bytes.NewBuffer(compressed) @@ -147,22 +146,19 @@ func (g Grid) Decompress(compressed []byte) error { paletteIndex = &paletteIndexRaw } - // log.Warn("RLE index %v for %dpx", paletteIndexRaw, repeatCount) + // log.Warn("RLE index %v for %dpx - coord=%d,%d", paletteIndexRaw, repeatCount, x, y) for i := uint64(0); i < repeatCount; i++ { - cursor++ if cursor%size == 0 { y++ x = 0 } point := render.NewPoint(int(x), int(y)) - if point.Y >= size || point.X >= size { - continue - } g[point.Y][point.X] = paletteIndex x++ + cursor++ } } @@ -180,10 +176,26 @@ func (g Grid) Visualize() string { if col == nil { line += " " } else { - line += fmt.Sprintf("%x", *col) + line += Alphabetize(col) } } lines = append(lines, line+"]") } return strings.Join(lines, "\n") } + +const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" + +// Alphabetize converts a palette index value into a single character for +// Visualize to display. +// +// It supports up to 36 palette indexes before it will wrap back around and +// begin reusing symbols. +func Alphabetize(value *uint64) string { + if value == nil { + return " " + } + + var i = int(*value) + return string(alphabet[i%len(alphabet)]) +} -- 2.30.2 From 6be2f86b583fe1fac22db382883b907b2d919aac Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Fri, 24 May 2024 15:03:32 -0700 Subject: [PATCH 4/6] RLE Encoding Code Cleanup [PTO] * For the doodad tool: skip the assets embed folder, the doodad binary doesn't need to include all the game's doodads/levelpacks/etc. and can save on file size. * In `doodad resave`, .doodad files with Vacuum() and upgrade their chunker from the MapAccessor to the RLEAccessor. * Fix a rare concurrent map read/write error in OptimizeChunkerAccessors. 
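For background on that last fix, here is a generic Go sketch (a hypothetical
store type, not the game's Chunker) of the pattern the change moves toward:
ranging directly over a shared map while other goroutines may write to it can
trigger Go's "concurrent map read and map write" panic, so the keys are
snapshotted under a lock and the workers fetch entries through a locking
accessor.

    package main

    import (
        "fmt"
        "runtime"
        "sync"
    )

    // store is a stand-in for any struct that guards a map with a mutex.
    type store struct {
        mu    sync.RWMutex
        items map[int]string
    }

    // keys snapshots the map's keys under the read lock.
    func (s *store) keys() []int {
        s.mu.RLock()
        defer s.mu.RUnlock()
        out := make([]int, 0, len(s.items))
        for k := range s.items {
            out = append(out, k)
        }
        return out
    }

    // get fetches one entry under the read lock.
    func (s *store) get(k int) (string, bool) {
        s.mu.RLock()
        defer s.mu.RUnlock()
        v, ok := s.items[k]
        return v, ok
    }

    func main() {
        s := &store{items: map[int]string{1: "a", 2: "b", 3: "c"}}

        var (
            work = make(chan int, len(s.items))
            wg   sync.WaitGroup
        )

        // One worker per CPU, mirroring the worker pool in OptimizeChunkerAccessors.
        for i := 0; i < runtime.NumCPU(); i++ {
            wg.Add(1)
            go func() {
                defer wg.Done()
                for k := range work {
                    if v, ok := s.get(k); ok {
                        fmt.Println(k, v)
                    }
                }
            }()
        }

        // Feed keys from the snapshot; the live map is never iterated concurrently.
        for _, k := range s.keys() {
            work <- k
        }
        close(work)
        wg.Wait()
    }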
--- Building.md | 8 ++++++++ Makefile | 10 +++++----- assets/assets_embed.go | 3 +++ assets/assets_omitted.go | 32 ++++++++++++++++++++++++++++++++ cmd/doodad/commands/resave.go | 6 ++++++ pkg/branding/branding.go | 2 +- pkg/doodads/fmt_maintenance.go | 14 ++++++++++++++ pkg/doodads/fmt_readwrite.go | 5 +++++ pkg/level/chunker_migrate.go | 3 +++ 9 files changed, 77 insertions(+), 6 deletions(-) create mode 100644 assets/assets_omitted.go create mode 100644 pkg/doodads/fmt_maintenance.go diff --git a/Building.md b/Building.md index 60512f0..442897c 100644 --- a/Building.md +++ b/Building.md @@ -15,6 +15,7 @@ - [Build on macOS from scratch](#build-on-macos-from-scratch) - [WebAssembly](#webassembly) - [Build Tags](#build-tags) + - [doodad](#doodad) - [dpp](#dpp) # Dockerfile @@ -373,6 +374,13 @@ Some tips to get a WASM build to work: Go build tags used by this game: +## doodad + +This tag is used when building the `doodad` command-line tool. + +It ensures that the embedded bindata assets (built-in doodads, etc.) do not +need to be bundled into the doodad binary, but only the main game binary. + ## dpp The dpp tag stands for Doodle++ and is used for official commercial builds of diff --git a/Makefile b/Makefile index 0efce21..8fb40ca 100644 --- a/Makefile +++ b/Makefile @@ -23,7 +23,7 @@ setup: clean .PHONY: build build: go build $(LDFLAGS) $(BUILD_TAGS) -o bin/sketchymaze cmd/doodle/main.go - go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad cmd/doodad/main.go + go build $(LDFLAGS) -tags=doodad -o bin/doodad cmd/doodad/main.go # `make buildall` to run all build steps including doodads. .PHONY: buildall @@ -34,7 +34,7 @@ buildall: doodads build build-free: gofmt -w . go build $(LDFLAGS) -o bin/sketchymaze cmd/doodle/main.go - go build $(LDFLAGS) -o bin/doodad cmd/doodad/main.go + go build $(LDFLAGS) -tags=doodad -o bin/doodad cmd/doodad/main.go # `make bindata` generates the embedded binary assets package. .PHONY: bindata @@ -75,7 +75,7 @@ mingw: go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ - go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad.exe cmd/doodad/main.go + go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go # `make mingw32` to cross-compile a Windows binary with mingw (32-bit). .PHONY: mingw32 @@ -85,7 +85,7 @@ mingw32: go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go env CGO_ENABLED="1" CC="/usr/bin/i686-w64-mingw32-gcc" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ - go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad.exe cmd/doodad/main.go + go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go # `make mingw-free` for Windows binary in free mode. 
.PHONY: mingw-free @@ -95,7 +95,7 @@ mingw-free: go build $(LDFLAGS_W) -o bin/sketchymaze.exe cmd/doodle/main.go env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ - go build $(LDFLAGS) -o bin/doodad.exe cmd/doodad/main.go + go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go # `make release` runs the release.sh script, must be run # after `make dist` diff --git a/assets/assets_embed.go b/assets/assets_embed.go index ac18c67..2a73cda 100644 --- a/assets/assets_embed.go +++ b/assets/assets_embed.go @@ -1,3 +1,6 @@ +//go:build !doodad +// +build !doodad + // Package assets gets us off go-bindata by using Go 1.16 embed support. // // For Go 1.16 embed, this source file had to live inside the assets/ folder diff --git a/assets/assets_omitted.go b/assets/assets_omitted.go new file mode 100644 index 0000000..7f94524 --- /dev/null +++ b/assets/assets_omitted.go @@ -0,0 +1,32 @@ +//go:build doodad +// +build doodad + +// Dummy version of assets_embed.go that doesn't embed any files. +// For the `doodad` tool. + +package assets + +import ( + "embed" + "errors" +) + +var Embedded embed.FS + +var errNotEmbedded = errors.New("assets not embedded") + +// AssetDir returns the list of embedded files at the directory name. +func AssetDir(name string) ([]string, error) { + return nil, errNotEmbedded +} + +// Asset returns the byte data of an embedded asset. +func Asset(name string) ([]byte, error) { + return nil, errNotEmbedded +} + +// AssetNames dumps the names of all embedded assets, +// with their legacy "assets/" prefix from go-bindata. +func AssetNames() []string { + return nil +} diff --git a/cmd/doodad/commands/resave.go b/cmd/doodad/commands/resave.go index 495b162..e317c2b 100644 --- a/cmd/doodad/commands/resave.go +++ b/cmd/doodad/commands/resave.go @@ -101,6 +101,12 @@ func resaveDoodad(c *cli.Context, filename string) error { filename = output } + if err := dd.Vacuum(); err != nil { + log.Error("Vacuum error: %s", err) + } else { + log.Info("Run vacuum on doodad file.") + } + log.Info("Saving back to disk") if err := dd.WriteJSON(filename); err != nil { return fmt.Errorf("couldn't write %s: %s", filename, err) diff --git a/pkg/branding/branding.go b/pkg/branding/branding.go index 1c2a332..cad8053 100644 --- a/pkg/branding/branding.go +++ b/pkg/branding/branding.go @@ -9,7 +9,7 @@ import ( const ( AppName = "Sketchy Maze" Summary = "A drawing-based maze game" - Version = "0.14.0" + Version = "0.14.1" Website = "https://www.sketchymaze.com" Copyright = "2023 Noah Petherbridge" Byline = "a game by Noah Petherbridge." diff --git a/pkg/doodads/fmt_maintenance.go b/pkg/doodads/fmt_maintenance.go new file mode 100644 index 0000000..9b3099b --- /dev/null +++ b/pkg/doodads/fmt_maintenance.go @@ -0,0 +1,14 @@ +package doodads + +// Vacuum runs any maintenance or migration tasks for the level at time of save. +// +// It will prune broken links between actors, or migrate internal data structures +// to optimize storage on disk of its binary data. +func (m *Doodad) Vacuum() error { + // Let the Chunker optimize accessor types. 
+ for _, layer := range m.Layers { + layer.Chunker.OptimizeChunkerAccessors() + } + + return nil +} diff --git a/pkg/doodads/fmt_readwrite.go b/pkg/doodads/fmt_readwrite.go index a50ae80..3d8e0bb 100644 --- a/pkg/doodads/fmt_readwrite.go +++ b/pkg/doodads/fmt_readwrite.go @@ -164,6 +164,11 @@ func (d *Doodad) WriteFile(filename string) error { d.Version = 1 d.GameVersion = branding.Version + // Maintenance functions, clean up cruft before save. + if err := d.Vacuum(); err != nil { + log.Error("Vacuum level %s: %s", filename, err) + } + bin, err := d.ToJSON() if err != nil { return err diff --git a/pkg/level/chunker_migrate.go b/pkg/level/chunker_migrate.go index 5b70fe2..60fce35 100644 --- a/pkg/level/chunker_migrate.go +++ b/pkg/level/chunker_migrate.go @@ -36,8 +36,11 @@ func (c *Chunker) OptimizeChunkerAccessors() { ma, _ := chunk.Accessor.(*MapAccessor) rle := NewRLEAccessor(chunk).FromMapAccessor(ma) + // Lock the chunker for updating. + c.chunkMu.Lock() c.Chunks[point].Type = RLEType c.Chunks[point].Accessor = rle + c.chunkMu.Unlock() } } } -- 2.30.2 From c7a3c7a79786b86d489c3cf52e8e03f33e62d71e Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Fri, 24 May 2024 16:05:37 -0700 Subject: [PATCH 5/6] Remove never-used GridType accessor + documentation [PTO] * Add documentation for the game's file formats and RLE encoding * Remove the never-used GridType Chunk Accessor constant --- README.md | 1 + cmd/doodad/commands/show.go | 2 - docs/Evolution of File Formats.md | 488 ++++++++++++++++++++++++++++++ docs/RLE Encoding for Levels.md | 116 +++++++ pkg/level/chunk.go | 1 - 5 files changed, 605 insertions(+), 3 deletions(-) create mode 100644 docs/Evolution of File Formats.md create mode 100644 docs/RLE Encoding for Levels.md diff --git a/README.md b/README.md index e947f4d..7289037 100644 --- a/README.md +++ b/README.md @@ -58,6 +58,7 @@ Some to start with: * [Building](Building.md) the game (tl;dr. run bootstrap.py) * [Tour of the Code](docs/Tour%20of%20the%20Code.md) +* [Evolution of File Formats](docs/Evolution%20of%20File%20Formats.md) # Keybindings diff --git a/cmd/doodad/commands/show.go b/cmd/doodad/commands/show.go index 9093607..ff8f0b5 100644 --- a/cmd/doodad/commands/show.go +++ b/cmd/doodad/commands/show.go @@ -353,8 +353,6 @@ func chunkTypeToName(v uint64) string { return "map" case level.RLEType: return "rle map" - case level.GridType: - return "grid" default: return fmt.Sprintf("type %d", v) } diff --git a/docs/Evolution of File Formats.md b/docs/Evolution of File Formats.md new file mode 100644 index 0000000..a7677db --- /dev/null +++ b/docs/Evolution of File Formats.md @@ -0,0 +1,488 @@ +# Evolution of File Formats + +This document will cover the evolution of the game's primary file formats (Level and Doodad drawings): how the on-disk format has changed over time to better compress/optimize the drawing data, and how the game continued to maintain backwards compatibility. + +The game, so far, is always able to _read_ levels and doodads created by older versions (all the way back to the very first alpha build!) and, upon saving them, will convert the file format to the latest standard in order to optimize and reduce disk space usage. + +The game can generally be configured (by editing feature flag constants) to _output_ drawings in the various legacy formats as well. Between the v1 JSON and v2 Gzip-JSON formats, the game is able to translate back and forth. 
From v3 Zipfiles onwards, back-migrating drawing files is not a supported operation - it can always save drawings _forward_ but code is not in place to e.g. take Zipfile members and put them back into the root struct to revert to a classic JSON-style file. + +Table of Contents: + +- [Evolution of File Formats](#evolution-of-file-formats) +- [General Design](#general-design) + - [A common file format between Levels and Doodads](#a-common-file-format-between-levels-and-doodads) + - [Chunks and the Chunker](#chunks-and-the-chunker) + - [Chunk Accessors](#chunk-accessors) +- [File Format Versions](#file-format-versions) + - [v1: Simple JSON Format](#v1-simple-json-format) + - [v2: GZip compressed JSON](#v2-gzip-compressed-json) + - [v3: Zip archives with external chunks](#v3-zip-archives-with-external-chunks) + - [Structure of the Zipfile](#structure-of-the-zipfile) + - [How I made it backwards compatible](#how-i-made-it-backwards-compatible) + - [Migrating the ZIP file on every Save](#migrating-the-zip-file-on-every-save) + - [The old Chunks and Files become staging areas](#the-old-chunks-and-files-become-staging-areas) + - [v3.1: A binary chunk file format](#v31-a-binary-chunk-file-format) + - [v4: Run Length Encoding MapAccessor](#v4-run-length-encoding-mapaccessor) + - [File Size Savings](#file-size-savings) + - [First Quest](#first-quest) + - [Tutorial Levels](#tutorial-levels) + - [Azulian Tag](#azulian-tag) + - [Built-in Doodads](#built-in-doodads) + - [Game Binary Size](#game-binary-size) + +# General Design + +Some thought and planning went into this in the very beginning. This section covers the general design goals of Levels & Doodads, how their actual pixel data is managed (in chunks), and how I left it open-ended to experiment with different chunk accessor algorithms in the future. + +## A common file format between Levels and Doodads + +Under the hood, the file format for .level and .doodad files are extremely similar. They share a handful of properties in common at their root data structure: + +* Version (of the file format - still at version 1!) +* Game Version (that last saved this file) +* Title, Author, common metadata +* Attached file storage such as custom wallpapers + +Both levels and doodads also have a Chunker somewhere that stores the actual pixels-on-screen drawing data for them. + +Both file types have evolved together, and when optimizations are made for e.g. Level files, the Doodads automatically benefit too for sharing a lot of code in common. + +## Chunks and the Chunker + +The drawing data itself (pixels on screen) from the beginning was decided to be split into Chunks, and each Chunk would manage the pixels for its part of the overall drawing. This can enable arbitrarily large drawing sizes for levels and doodads, with theoretically "infinite" boundaries (within computer integer bounds). + +Each Level or Doodad file will have one or more Chunkers. The Chunker itself stores the common properties for the drawing (like the Chunk Size, e.g., 128 square pixels by default), and it manages translating from "world coordinates" of your drawing into "chunk coordinates", so it knows which Chunk is responsible for that part of the drawing. + +For an arbitrary world coordinate (like 900,-290) the Chunker can divide it by the Chunk Size of 128 and find that chunk coordinate (7,-2) is responsible for that chunk and asks it for its pixels. + +Levels currently only have one Chunker, but Doodads have many (one for each frame of animation they store). 
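+
+To make the coordinate math concrete, here is a minimal sketch of that world-to-chunk translation. This is an illustration only, not the game's actual Chunker code: the hard-coded chunk size and the truncating integer division are assumptions that simply mirror the example above.
+
+```go
+package main
+
+import "fmt"
+
+const chunkSize = 128
+
+// chunkCoordinate divides a world coordinate by the chunk size.
+// Go's integer division truncates toward zero, which reproduces the
+// (900,-290) -> (7,-2) example; the real Chunker may treat negative
+// coordinates differently.
+func chunkCoordinate(x, y int) (cx, cy int) {
+	return x / chunkSize, y / chunkSize
+}
+
+func main() {
+	cx, cy := chunkCoordinate(900, -290)
+	fmt.Printf("world (900,-290) belongs to chunk (%d,%d)\n", cx, cy)
+}
+```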
+ +## Chunk Accessors + +There is support from the beginning for each Chunk to manage its own data in any way it wants to. For example, a Chunk that is completely filled by one color of pixel could store its information _much_ more succinctly than a chunk made up of very sparse lines, where each pixel coordinate needs to be accounted for. + +The first type of Chunk accessor was the **MapAccessor**, which stored the X,Y coordinates of each pixel mapped to their Palette color index (see the example below, in [v1: Simple JSON Format](#v1-simple-json-format)). + +It was planned that future accessors would be added such as a **GridAccessor** for very densely packed chunks (to store in a 2D array) and have the Chunker automatically decide which format is optimal to encode it but this was still never added. + +From the game's first alpha (0.0.9, July 9 2019) through version 0.14.0 (May 4 2024), the MapAccessor was the only one ever implemented. + +# File Format Versions + +## v1: Simple JSON Format + +At first, levels were just saved as simple JSON files (whitespace compressed only), which when pretty printed (and with comment annotations added) looked like this: + +```javascript +{ + // Common properties between levels and doodads + "version": 1, // json schema version, still at "1" today! + "gameVersion": "0.0.10-alpha", + "title": "Alpha", + "author": "Noah P", + "locked": false, // read locked/won't open in editor + "files": null, // attached files + "passwd": "", // level password (never used) + + // The drawing data itself, divided into chunks. + "chunks": { + "size": 128, + "chunks": { + // Chunk coordinate + "0,0": { + "type": 0, // 0 = MapAccessor chunk type + "data": { + // Each pixel coordinate mapped + // to a palette index number... + "69,32": 0, + "69,33": 0, + "70,34": 0, + } + } + } + }, + "palette": { + "swatches": [ + // indexed color palette for the drawing + { + "name": "solid", + "color": "#000000", + "solid": true + }, + { + "name": "decoration", + "color": "#999999" + }, + { + "name": "fire", + "color": "#ff0000", + "fire": true + }, + { + "name": "water", + "color": "#0000ff", + "water": true + } + ] + }, + "pageType": 2, // 2 = Bounded LevelType + "boundedWidth": 2550, + "boundedHeight": 3300, + "wallpaper": "notebook.png", + "actors": { + // doodads in your level, by their instanced ID + "4d193308-a52d-4153-a10d-a010445dd47b": { + "filename": "button.doodad", + "point": "154,74", + "links": [ + // linked actor IDs + "8d501581-0904-4dfb-a326-57330b2484be" + ] + }, + "8d501581-0904-4dfb-a326-57330b2484be": { + "filename": "electric-door.doodad", + "point": "320,74", + "links": [ + "4d193308-a52d-4153-a10d-a010445dd47b" + ] + } + } +} +``` + +A **doodad file** was very similar but had some other relevant properties in its JSON format, such as: + +* Their Size (dimensions) +* JavaScript source code +* Hitbox, Tags/Options + +A doodad file has one Palette like a level, but it has multiple chunkers (one for each layer of the doodad; which are how you store frames for animation or state changes). + +What level and doodads have in common is File storage for attaching files into them (such as custom wallpapers for a level, or sound effects for a doodad), with their binary data encoded to base64. + +In the first iteration of the file format, _all_ of this was encoded into the single JSON file on disk! + +For densely packed levels, though, the JSON file got really large quickly, even with the whitespace removed. 
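+
+To illustrate why: here is a simplified, hypothetical sketch of the MapAccessor idea, a sparse map of pixel coordinates to palette indexes like the "data" object shown above. The type and field names here are made up for this document; the real accessor stores richer swatch data, but the shape is the same.
+
+```go
+package main
+
+import "fmt"
+
+// sparseChunk maps an {x,y} pixel coordinate to a palette index.
+type sparseChunk map[[2]int]int
+
+func main() {
+	pixels := sparseChunk{
+		{69, 32}: 0,
+		{69, 33}: 0,
+		{70, 34}: 0,
+	}
+
+	// Serializing writes one "X,Y": index entry per pixel -- cheap for
+	// sparse line art, but very verbose for densely packed chunks.
+	for xy, index := range pixels {
+		fmt.Printf("\"%d,%d\": %d\n", xy[0], xy[1], index)
+	}
+}
+```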
+ +## v2: GZip compressed JSON + +The second iteration was to basically add gzip compression to the level files, which slashed their file size considerably. + +How I made it backwards compatible: + +The game is able to open a ".level" file which is _either_ a straight JSON file from older versions of the game, or the new gzip compressed format. + +When **opening** a file, it: + +1. Checks if the first byte is the ASCII character `{`, and will parse it as legacy v1 JSON format. +2. Checks if the file's opening bytes are instead a gzip header (hex `1f8b`), and will load it from GZip (v2 file format). + +The GZip reader is basically a wrapper that decodes a JSON file with compression: + +```go +// pkg/level/fmt_json.go + +// FromGzip deserializes a gzip compressed level JSON. +func FromGzip(data []byte) (*Level, error) { + // This function works, do not touch. + var ( + level = New() + buf = bytes.NewBuffer(data) + reader *gzip.Reader + decoder *json.Decoder + ) + + reader, err := gzip.NewReader(buf) + if err != nil { + return nil, err + } + + decoder = json.NewDecoder(reader) + decoder.Decode(level) + + return level, nil +} +``` + +## v3: Zip archives with external chunks + +Very large or dense levels were resulting in enormous file sizes even with gzip compression, and they were taking a long time to load from disk since _all_ chunk data was still in one file! + +A loading screen feature was added to Sketchy Maze around this time because big levels could take _seconds_ to load. + +The level format was reworked again and now the .level file is basically a Zip archive with member files within. Most importantly, this enabled the drawing's chunks to be kicked out into separate files so we could manage "loading" the level more efficiently. + +### Structure of the Zipfile + +A level zipfile is laid out like so: + +``` +/ + level.json + assets/ + screenshots/ + large.png + chunks/ + 0/ + -1,0.json + -1,1.json + -2,0.json +``` + +The `level.json` file contains most of the basic metadata from the old file format, except the chunks are evicted and stored in separate JSON files by their chunk coordinate. Doodads will have a `doodad.json` file here instead. + +Notice the directory name **0/** holding the chunks for a level file: this zero is a layer ID to accommodate Doodads which share a similar file format. Levels only have one Layer (for now), so the directory name is always zero. Doodads will have each of their layers enumerated from 0, 1, 2, ... + +Attached assets such as wallpapers or embedded custom doodads would be regular ZIP file members under the assets/ folder. 
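+
+As a rough sketch, a chunk's member name inside the ZIP can be derived from its layer number and chunk coordinate like this (the helper below is illustrative, not the game's actual API, but it produces the same kind of paths as the listing above):
+
+```go
+package main
+
+import "fmt"
+
+// chunkPath builds the ZIP member name for one chunk of one layer,
+// e.g. layer 0, chunk (-1,0) -> "chunks/0/-1,0.json".
+func chunkPath(layer, chunkX, chunkY int) string {
+	return fmt.Sprintf("chunks/%d/%d,%d.json", layer, chunkX, chunkY)
+}
+
+func main() {
+	fmt.Println(chunkPath(0, -1, 0)) // chunks/0/-1,0.json
+	fmt.Println(chunkPath(0, -2, 0)) // chunks/0/-2,0.json
+}
+```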
+ +The level.json file as of v0.14.0 looks a bit like this: + +```json +{ + "version": 1, + "gameVersion": "0.14.0", + "title": "The Castle", + "author": "Noah P", + "locked": false, + "files": {}, // files are evicted to the assets/ folder + "passwd": "", + "uuid": "18f0f734-d7ad-4b10-be6d-f40d31334816", + "rules": { + "difficulty": 0 + }, + "chunks": { + "size": 128, + "chunks": {} // chunks are evicted as well + }, + "palette": { + "swatches": [ + { + "name": "grass", + "color": "#009900", + "pattern": "noise.png", + "solid": true + }, + { + "name": "fire", + "color": "#ff0000", + "pattern": "marker.png", + "fire": true + } + ] + }, + "pageType": 0, + "boundedWidth": 2550, + "boundedHeight": 3300, + "wallpaper": "graph.png", + "scroll": "-3072,51", + "actors": { + "0fac06dc-fe1e-11eb-9dfc-9cb6d0c2aa8b": { + "filename": "key-blue.doodad", + "point": "4403,239" + }, + "10554b0f-fe18-11eb-9dfc-9cb6d0c2aa8b": { + "filename": "door-blue.doodad", + "point": "3909,344" + }, + }, + "saveDoodads": false, + "saveBuiltins": false +} +``` + +### How I made it backwards compatible + +The function that reads Levels and Doodads from disk continues to check its headers: + +1. If the first byte is a `{` it's a legacy old drawing and is parsed as classic JSON (format v1) +2. If the first bytes are a GZip header, try loading it as a gz compressed JSON file (format v2) +3. If instead the header is a ZIP archive, open it and look for the `level.json` or `doodad.json` for the expected file you're opening. + +It was able to continue loading drawings from the _very_ earliest alpha of the game and can still load them today. + +### Migrating the ZIP file on every Save + +The upgrade path to save a legacy drawing in the modern ZIP format was very straightforward: + +In the root Level struct (what level.json decodes into): + +* Deprecate the "files" and "chunks" fields since these are evicted out into separate ZIP file members, so they should be empty in the new file format. +* So on level save, if the Level has any data in these fields (meaning you had loaded it from a legacy file format), evict those fields to clear them out when generating the new ZIP file. + +When an opened level _was_ a ZIP file to begin with, a pointer to the Zipfile handle is kept accessible to make saving those levels more efficient. When a level _wasn't_ a ZIP file, I basically create a new one, write the level.json and flush out its embedded files/chunks to their correct places. + +So, when you've opened a ZIP file level and you re-save it, the process is: + +1. First, copy any "cold storage" chunks from the old Zipfile to the new one. + * These are the chunks not actively in memory (see the next section about [staging areas](#the-old-chunks-and-files-become-staging-areas)) +2. Then, flush out any data in the legacy "chunks" or "files" sections to external zipfile members. + * This is the same logic for migrating an old gzip-json level, where _all_ its data was in these places... + * As well as flushing out recently edited chunks or recently attached files (per the next section). + +### The old Chunks and Files become staging areas + +In the base Level struct, the old keys where Files and Chunks used to be stored have now become the staging area for "warmed up" chunks or recently attached files. + +For example: the level.json file in the ZIP stores no data in these fields, and chunks are stored as separate members. 
Whenever the game **loads** a chunk from ZIP, it will cache it in the old Chunks structure so that it is warmed up and ready to use.
+
+When **playing** a level: there is a chunk loading/unloading algorithm that balances memory use during gameplay. Chunks which are currently on screen may be fetched from the ZIP file and cached in the legacy Chunks structure. The game will track which chunks are accessed on the current game tick (as well as the previous couple of ticks).
+
+If a chunk has not been accessed in a few game ticks, it is destroyed and removed from the legacy Chunks structure (along with its SDL2 texture being cleaned up, etc.); if the player scrolls the chunk back on screen, it is recalled from the ZIP file and cached again.
+
+When **editing** a level in the editor, any chunk that receives a local modification is also stored in the old Chunks structure, and is kept there until the next save: when all the loaded chunks are flushed out to ZIP files. Chunks with modifications are NOT flushed by the auto-loading/unloading algorithm so their changes don't get lost.
+
+## v3.1: A binary chunk file format
+
+At this point: there is still only one Chunk accessor (the MapAccessor) and its JSON files in the zip file still looked like (if pretty printed):
+
+```javascript
+{
+    "type": 0,
+    "data": {
+        "69,32": 1, // coordinate to palette index
+        "69,33": 2,
+        "70,34": 0,
+    }
+}
+```
+
+The next iteration was to compress these down into a binary format to shrink them further by removing the extra JSON characters (quotes, brackets, etc.) and the ASCII human readable digits.
+
+In the ZIP file: the legacy chunks will have their .json file extension but the new binary format stores them into .bin files; so the game is able to load old and new levels by checking the file types available for their chunks.
+
+The binary format makes use of variable-length integers provided by Go's encoding/binary package. This is the same VarInt type from Protocol Buffers: small numbers encode to just a few bytes, and large numbers may use additional bytes.
+
+* The **first** Uvarint in the binary format is the chunk type (0 = MapAccessor)
+* The remaining data is arbitrary and up to that chunk accessor to handle how it wants.
+
+For the MapAccessor: the remaining binary data is a repeating stream of three varints:
+
+1. X coordinate
+2. Y coordinate
+3. Palette index number
+
+For migrating old JSON chunks into binary format: on save it will always output in the .bin format (by calling the chunk accessor's MarshalBinary method), but on reading is able to handle both .bin and legacy .json.
+
+## v4: Run Length Encoding MapAccessor
+
+After the release of v0.14.0, a new chunk accessor has _finally_ been added to the game: the **RLEAccessor**.
+
+The RLEAccessor is functionally identical to the MapAccessor, in that (in working memory) it stores a hash map of world coordinates to the palette color. But where the RLEAccessor is different is with the **on disk format** of how it encodes its chunks.
+
+The on-disk format uses binary (.bin) only, and compresses the chunk's pixel data using Run Length Encoding (RLE). The algorithm is basically:
+
+* When **compressing** your chunk data to save on disk:
+  * It creates a 2D grid array of integers in order to rasterize a complete bitmap of the chunk.
+    * For a chunk size of 128, this is a 128x128 2D array.
+    * The values are your palette index numbers (0 to N)
+    * "Null" colors that are blank in the chunk use the value 0xFFFF.
+      * Note: the gameplay logic enforces only 256 colors per level palette, but theoretically 65,534 colors could be supported before the "null" color would collide.
+  * It then serializes the 2D bitmap using RLE with a series of packed Uvarints:
+    1. The palette color to set
+    2. The number of pixels to repeat that palette color.
+* When **decompressing** the RLE encoded data, the process is reversed:
+  * It creates a 2D grid of your square chunk size again (all nulls)
+  * Then it decompresses the RLE encoded stream of Uvarints, filling out the grid from the top-left to bottom-right corner.
+  * Finally, it scans the grid to find non-null colors to populate its regular MapAccessor struct of points-to-colors.
+
+For a simple example: if a chunk consists entirely of the same color across all 128x128 pixels, the compressed RLE stream takes only about four bytes on disk (see the code sketch after the First Quest table below):
+
+1. The palette index number
+2. The repeat number (16,384 for a 128x128 chunk grid)
+
+For **migrating MapAccessors to RLEAccessors:**
+
+The game is still able to read legacy MapAccessor chunks, and when **saving** a drawing back to disk, it fans out over all your level chunks and checks whether they need to be optimized:
+
+* If their chunk type is a MapAccessor, copy the underlying map data into an RLEAccessor.
+* Then when saving to disk, the RLEAccessor MarshalBinary() func will create the .bin file in the updated format on disk.
+
+### File Size Savings
+
+On average the RLE encoding slashes file sizes by over 90% for most levels, especially densely packed levels with lots of large colored areas.
+
+Here are examples from the game's built-in level packs.
+
+See [RLE Encoding for Levels](./RLE%20Encoding%20for%20Levels.md) for more breakdown of these numbers.
+
+#### First Quest
+
+| Filename           | Orig Size | New Size | Reduction |
+|--------------------|-----------|----------|-----------|
+| Boat.level         | 4.3M      | 292K     | 93%       |
+| Castle.level       | 5.6M      | 241K     | 95%       |
+| Desert-1of2.level  | 4.4M      | 248K     | 94%       |
+| Desert-2of2.level  | 3.2M      | 290K     | 91%       |
+| Jungle.level       | 11M       | 581K     | 94%       |
+| Shapeshifter.level | 22M       | 263K     | 98%       |
+| Thief 1.level      | 538K      | 193K     | 64%       |
+
+The combined levelpack ZIP file itself:
+
+* Filename: builtin-100-FirstQuest.levelpack
+* Original: 50M (52369408)
+* New size: 1.8M (1838542) 96%
+
+The most notable improvement is Shapeshifter.level, which features **large** areas of solid color and compressed by 98% with the RLE encoding!
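+
+To see why a level like Shapeshifter compresses so dramatically, here is a standalone sketch of the RLE stream described earlier. It uses Go's encoding/binary Uvarints directly and is not the game's actual rle package: it encodes a fully solid 128x128 chunk as a single (palette index, repeat count) pair and reads it back.
+
+```go
+package main
+
+import (
+	"bytes"
+	"encoding/binary"
+	"fmt"
+)
+
+func main() {
+	const (
+		chunkSize    = 128
+		paletteIndex = 1 // the solid color's palette index
+	)
+
+	// Encode: one run that covers every pixel in the chunk.
+	var (
+		buf = make([]byte, 0, 2*binary.MaxVarintLen64)
+		tmp = make([]byte, binary.MaxVarintLen64)
+	)
+	n := binary.PutUvarint(tmp, paletteIndex)
+	buf = append(buf, tmp[:n]...)
+	n = binary.PutUvarint(tmp, chunkSize*chunkSize) // 16,384 pixels
+	buf = append(buf, tmp[:n]...)
+	fmt.Printf("a fully solid chunk compresses to %d bytes\n", len(buf))
+
+	// Decode: read the (index, repeat) pair back out of the stream.
+	reader := bytes.NewReader(buf)
+	index, _ := binary.ReadUvarint(reader)
+	repeat, _ := binary.ReadUvarint(reader)
+	fmt.Printf("palette %d repeated %d times\n", index, repeat)
+}
+```
+
+This prints a 4-byte stream for the solid chunk, matching the "about four bytes" case described above; a chunk of sparse line art instead produces many short runs and compresses far less.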
+ +#### Tutorial Levels + +Many of the Tutorial levels are made of sparsely drawn "line art" rather than solid colored areas, so the reduction in filesize is closer to ~60% instead of 90%+ + +| Filename | Orig Size | New Size | Reduction | +|--------------------|-----------|----------|-----------| +| Tutorial 1.level | 186K | 111K | 40% | +| Tutorial 2.level | 680K | 229K | 66% | +| Tutorial 3.level | 409K | 148K | 64% | +| Tutorial 4.level | 901K | 376K | 58% | +| Tutorial 5.level | 3M | 645K | 78% | +| Zoo.level | 2.8M | 226K | 92% | + +The levelpack ZIP: + +* Filename: builtin-Tutorial.levelpack +* Original: 7.8M (8119658) +* New size: 1.6M (1650381) 79% + +#### Azulian Tag + +| Filename | Orig Size | New Size | Reduction | +|---------------------------|-----------|----------|-----------| +| AzulianTag-Forest.level | 17M | 312K | 98% | +| AzulianTag-Night.level | 702K | 145K | 79% | +| AzulianTag-Tutorial.level | 3.4M | 185K | 94% | + +The levelpack ZIP: + +* Filename: builtin-200-AzulianTag.levelpack +* Original: 21M (21824441) +* New size: 525K (537345) 97% + +#### Built-in Doodads + +The RLE compression also improved the file sizes of the game's built-in doodads. For a random spot check of some: + +| Filename | Orig Size | New Size | +|--------------------------|-----------|----------| +| anvil.doodad | 2.7K | 1.3K | +| azu-blu.doodad | 8.1K | 5.2K | +| azu-red.doodad | 8.1K | 5.2K | +| azu-white.doodad | 8.1K | 5.2K | +| box.doodad | 29K | 4.1K | +| boy.doodad | 30K | 8.1K | +| crumbly-floor.doodad | 15K | 3.3K | +| door-blue.doodad | 18K | 2.7K | +| electric-trapdoor.doodad | 9.5K | 2.8K | + +Total file size of all builtin doodads: + +* Original: 576.8 KiB +* New: 153.7 KiB (73% reduction) + +#### Game Binary Size + +The game binary embeds its built-in doodads and levelpacks directly, and so this optimization has also slashed the overall size of the game binary too: + +* Filename: sketchymaze +* Original: 105M +* New size: 30M, 71% smaller \ No newline at end of file diff --git a/docs/RLE Encoding for Levels.md b/docs/RLE Encoding for Levels.md new file mode 100644 index 0000000..d052808 --- /dev/null +++ b/docs/RLE Encoding for Levels.md @@ -0,0 +1,116 @@ +# RLE Encoding for Levels + +This documents some of the file size savings of the game's built-in levelpacks and doodads once the file format was migrated to use Run Length Encoding (RLE) for drawing chunks. + +See [Evolution of File Formats](./Evolution%20of%20File%20Formats.md) for a history of the game's file formats. 
+ +# Levels + +The file sizes of the levels themselves: + +## First Quest + +| Filename | Orig Size | New Size | Reduction | +|--------------------|-----------|----------|-----------| +| Boat.level | 4.3M | 292K | 93% | +| Castle.level | 5.6M | 241K | 95% | +| Desert-1of2.level | 4.4M | 248K | 94% | +| Desert-2of2.level | 3.2M | 290K | 91% | +| Jungle.level | 11M | 581K | 94% | +| Shapeshifter.level | 22M | 263K | 98% | +| Thief 1.level | 538K | 193K | 64% | + +In raw bytes: + +| Filename | Orig Size | New Size | +|--------------------|-----------|----------| +| Boat.level | 4494184 | 298943 | +| Castle.level | 5854222 | 245872 | +| Desert-1of2.level | 4589382 | 253768 | +| Desert-2of2.level | 3310784 | 296681 | +| Jungle.level | 10928779 | 594601 | +| Shapeshifter.level | 22823811 | 269307 | +| Thief 1.level | 550579 | 196731 | + +The levelpack ZIP: + +* Filename: builtin-100-FirstQuest.levelpack +* Original: 50M (52369408) +* New size: 1.8M (1838542) 96% + +## Tutorial + +| Filename | Orig Size | New Size | Reduction | +|--------------------|-----------|----------|-----------| +| Tutorial 1.level | 186K | 111K | 40% | +| Tutorial 2.level | 680K | 229K | 66% | +| Tutorial 3.level | 409K | 148K | 64% | +| Tutorial 4.level | 901K | 376K | 58% | +| Tutorial 5.level | 3M | 645K | 78% | +| Zoo.level | 2.8M | 226K | 92% | + +In raw bytes: + +| Filename | Orig Size | New Size | +|--------------------|-----------|----------| +| Tutorial 1.level | 190171 | 113568 | +| Tutorial 2.level | 695936 | 233880 | +| Tutorial 3.level | 418490 | 150565 | +| Tutorial 4.level | 921781 | 384775 | +| Tutorial 5.level | 3059902 | 659487 | +| Zoo.level | 2925633 | 230712 | + +The levelpack ZIP: + +* Filename: builtin-Tutorial.levelpack +* Original: 7.8M (8119658) +* New size: 1.6M (1650381) 79% + +## Azulian Tag + +| Filename | Orig Size | New Size | Reduction | +|---------------------------|-----------|----------|-----------| +| AzulianTag-Forest.level | 17M | 312K | 98% | +| AzulianTag-Night.level | 702K | 145K | 79% | +| AzulianTag-Tutorial.level | 3.4M | 185K | 94% | + +In raw bytes: + +| Filename | Orig Size | New Size | +|---------------------------|-----------|----------| +| AzulianTag-Forest.level | 17662031 | 318547 | +| AzulianTag-Night.level | 718345 | 147612 | +| AzulianTag-Tutorial.level | 3508093 | 189310 | + +The levelpack ZIP: + +* Filename: builtin-200-AzulianTag.levelpack +* Original: 21M (21824441) +* New size: 525K (537345) 97% + +# Doodads + +Spot check of random doodad filesize changes: + +| Filename | Orig Size | New Size | +|--------------------------|-----------|----------| +| anvil.doodad | 2.7K | 1.3K | +| azu-blu.doodad | 8.1K | 5.2K | +| azu-red.doodad | 8.1K | 5.2K | +| azu-white.doodad | 8.1K | 5.2K | +| box.doodad | 29K | 4.1K | +| boy.doodad | 30K | 8.1K | +| crumbly-floor.doodad | 15K | 3.3K | +| door-blue.doodad | 18K | 2.7K | +| electric-trapdoor.doodad | 9.5K | 2.8K | + +Total file size of all builtin doodads: + +* Original: 576.8 KiB +* New: 153.7 KiB (73% reduction) + +# Game Binary + +* Filename: sketchymaze +* Original: 105M +* New size: 30M, 71% smaller \ No newline at end of file diff --git a/pkg/level/chunk.go b/pkg/level/chunk.go index 80d5d53..6ea8112 100644 --- a/pkg/level/chunk.go +++ b/pkg/level/chunk.go @@ -21,7 +21,6 @@ import ( const ( MapType uint64 = iota RLEType - GridType ) // Default chunk type for newly created chunks (was MapType). 
-- 2.30.2 From f35bc48c055c18fb56b646d78418d39c52580608 Mon Sep 17 00:00:00 2001 From: Noah Petherbridge Date: Fri, 24 May 2024 16:43:11 -0700 Subject: [PATCH 6/6] Code cleanup for RLE compression --- Changes.md | 29 ++++++++++++++++++++++ pkg/level/chunk_map.go | 2 +- pkg/level/chunk_rle.go | 48 ++---------------------------------- pkg/level/chunker.go | 5 ---- pkg/level/chunker_migrate.go | 5 +--- 5 files changed, 33 insertions(+), 56 deletions(-) diff --git a/Changes.md b/Changes.md index fbd49c1..e40a2dc 100644 --- a/Changes.md +++ b/Changes.md @@ -1,5 +1,34 @@ # Changes +## v0.14.1 (TBD) + +The file format for Levels and Doodads has been optimized to store drawing data +with Run Length Encoding (RLE) compression which nets a filesize savings upwards +of 90%, especially for levels featuring large areas of solid colors. + +* For example, the Shapeshifter level from the First Quest has shrank from + 22 MB to only 263 KB. +* The complete size of the First Quest levelpack from the previous release of + the game shrinks from 50 MB to only 1.8 MB! +* The game is still able to load levels and doodads created by previous releases + and will automatically convert them into the optimized RLE format when you + save them back to disk. +* The `doodad resave` command can also optimize your levels and doodads outside + of the game's editor. + +Other miscellaneous changes: + +* Command line option `sketchymaze --new` to open the game quickly to a new + level in the editor. + +Cleanup of old features and unused code: + +* The game can no longer save any Chunk files in their legacy JSON format: it + can still read JSON but all writes will be in the binary chunk format (usually + with the new RLE compression). Regular releases of the game have not been + writing in the JSON format for a while as it is controlled by hard-coded + feature flag constants. + ## v0.14.0 (May 4 2024) Level screenshots and thumbnails: diff --git a/pkg/level/chunk_map.go b/pkg/level/chunk_map.go index f5b5f3c..1763ee0 100644 --- a/pkg/level/chunk_map.go +++ b/pkg/level/chunk_map.go @@ -278,7 +278,7 @@ func (a *MapAccessor) UnmarshalBinary(compressed []byte) error { defer a.mu.Unlock() // New format: decompress the byte stream. - log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) + // log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) var reader = bytes.NewBuffer(compressed) diff --git a/pkg/level/chunk_rle.go b/pkg/level/chunk_rle.go index 0744552..3292890 100644 --- a/pkg/level/chunk_rle.go +++ b/pkg/level/chunk_rle.go @@ -2,7 +2,6 @@ package level import ( "git.kirsle.net/SketchyMaze/doodle/pkg/level/rle" - "git.kirsle.net/SketchyMaze/doodle/pkg/log" "git.kirsle.net/go/render" ) @@ -63,7 +62,7 @@ This accessor uses Run Length Encoding (RLE) in its binary format. Starting with the top-left pixel of this chunk, the binary format is a stream of bytes formatted as such: -- UVarint for the palette index number (0-255), with 0xFF meaning void +- UVarint for the palette index number (0-255), with 0xFFFF meaning void - UVarint for the length of repetition of that palette index */ func (a *RLEAccessor) MarshalBinary() ([]byte, error) { @@ -103,7 +102,7 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { defer a.acc.mu.Unlock() // New format: decompress the byte stream. 
- log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) + // log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed)) grid, err := rle.NewGrid(int(a.chunk.Size)) if err != nil { @@ -129,46 +128,3 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error { return nil } - -/* -// Prepare the 2D grid to decompress the RLE stream into. - var ( - size = int(a.chunk.Size) - _, err = rle.NewGrid(size) - x, y, cursor int - ) - if err != nil { - return err - } - - var reader = bytes.NewBuffer(compressed) - - for { - var ( - paletteIndex, err1 = binary.ReadUvarint(reader) - repeatCount, err2 = binary.ReadUvarint(reader) - ) - - if err1 != nil || err2 != nil { - log.Error("reading Uvarints from compressed data: {%s, %s}", err1, err2) - break - } - - log.Warn("RLE index %d for %dpx", paletteIndex, repeatCount) - - for i := uint64(0); i < repeatCount; i++ { - cursor++ - if cursor%size == 0 { - y++ - x = 0 - } else { - x++ - } - - point := render.NewPoint(int(x), int(y)) - if paletteIndex != 0xFF { - a.acc.grid[point] = NewSparseSwatch(int(paletteIndex)) - } - } - } -*/ diff --git a/pkg/level/chunker.go b/pkg/level/chunker.go index 134366a..dfa02fe 100644 --- a/pkg/level/chunker.go +++ b/pkg/level/chunker.go @@ -627,11 +627,6 @@ func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uin } ) - if point.X < 0 || point.Y < 0 { - log.Error("RelativeCoordinate: X < 0! abs=%s rel=%s chunk=%s size=%d", abs, point, chunkCoord, chunkSize) - log.Error("RelativeCoordinate(2): size=%d offset=%s point=%s", size, offset, point) - } - return point } diff --git a/pkg/level/chunker_migrate.go b/pkg/level/chunker_migrate.go index 60fce35..751596b 100644 --- a/pkg/level/chunker_migrate.go +++ b/pkg/level/chunker_migrate.go @@ -14,9 +14,7 @@ import ( // and possibly migrate them to a different Accessor implementation when // saving on disk. func (c *Chunker) OptimizeChunkerAccessors() { - log.Info("Optimizing Chunker Accessors") - - // TODO: parallelize this with goroutines + // Parallelize this with goroutines. var ( chunks = make(chan *Chunk, len(c.Chunks)) wg sync.WaitGroup @@ -58,7 +56,6 @@ func (c *Chunker) OptimizeChunkerAccessors() { close(chunks) wg.Wait() - } // FromMapAccessor migrates from a MapAccessor to RLE. -- 2.30.2