RLE Compression for File Formats #95

Merged
kirsle merged 6 commits from rle-compression into master 2024-05-24 23:48:00 +00:00
28 changed files with 1546 additions and 120 deletions

View File

@ -15,6 +15,7 @@
- [Build on macOS from scratch](#build-on-macos-from-scratch) - [Build on macOS from scratch](#build-on-macos-from-scratch)
- [WebAssembly](#webassembly) - [WebAssembly](#webassembly)
- [Build Tags](#build-tags) - [Build Tags](#build-tags)
- [doodad](#doodad)
- [dpp](#dpp) - [dpp](#dpp)
# Dockerfile # Dockerfile
@ -373,6 +374,13 @@ Some tips to get a WASM build to work:
Go build tags used by this game: Go build tags used by this game:
## doodad
This tag is used when building the `doodad` command-line tool.
It ensures that the embedded bindata assets (built-in doodads, etc.) do not
need to be bundled into the doodad binary, but only the main game binary.
## dpp ## dpp
The dpp tag stands for Doodle++ and is used for official commercial builds of The dpp tag stands for Doodle++ and is used for official commercial builds of

View File

@ -1,5 +1,34 @@
# Changes # Changes
## v0.14.1 (TBD)
The file format for Levels and Doodads has been optimized to store drawing data
with Run Length Encoding (RLE) compression which nets a filesize savings upwards
of 90%, especially for levels featuring large areas of solid colors.
* For example, the Shapeshifter level from the First Quest has shrunk from
22 MB to only 263 KB.
* The complete size of the First Quest levelpack from the previous release of
the game shrinks from 50 MB to only 1.8 MB!
* The game is still able to load levels and doodads created by previous releases
and will automatically convert them into the optimized RLE format when you
save them back to disk.
* The `doodad resave` command can also optimize your levels and doodads outside
of the game's editor.
Other miscellaneous changes:
* Command line option `sketchymaze --new` to open the game quickly to a new
level in the editor.
Cleanup of old features and unused code:
* The game can no longer save any Chunk files in their legacy JSON format: it
can still read JSON but all writes will be in the binary chunk format (usually
with the new RLE compression). Regular releases of the game have not been
writing in the JSON format for a while as it is controlled by hard-coded
feature flag constants.
## v0.14.0 (May 4 2024) ## v0.14.0 (May 4 2024)
Level screenshots and thumbnails: Level screenshots and thumbnails:

View File

@ -23,7 +23,7 @@ setup: clean
.PHONY: build .PHONY: build
build: build:
go build $(LDFLAGS) $(BUILD_TAGS) -o bin/sketchymaze cmd/doodle/main.go go build $(LDFLAGS) $(BUILD_TAGS) -o bin/sketchymaze cmd/doodle/main.go
go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad cmd/doodad/main.go go build $(LDFLAGS) -tags=doodad -o bin/doodad cmd/doodad/main.go
# `make buildall` to run all build steps including doodads. # `make buildall` to run all build steps including doodads.
.PHONY: buildall .PHONY: buildall
@ -34,7 +34,7 @@ buildall: doodads build
build-free: build-free:
gofmt -w . gofmt -w .
go build $(LDFLAGS) -o bin/sketchymaze cmd/doodle/main.go go build $(LDFLAGS) -o bin/sketchymaze cmd/doodle/main.go
go build $(LDFLAGS) -o bin/doodad cmd/doodad/main.go go build $(LDFLAGS) -tags=doodad -o bin/doodad cmd/doodad/main.go
# `make bindata` generates the embedded binary assets package. # `make bindata` generates the embedded binary assets package.
.PHONY: bindata .PHONY: bindata
@ -75,7 +75,7 @@ mingw:
go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go
env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \ env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \
GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \
go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad.exe cmd/doodad/main.go go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go
# `make mingw32` to cross-compile a Windows binary with mingw (32-bit). # `make mingw32` to cross-compile a Windows binary with mingw (32-bit).
.PHONY: mingw32 .PHONY: mingw32
@ -85,7 +85,7 @@ mingw32:
go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go go build $(LDFLAGS_W) $(BUILD_TAGS) -o bin/sketchymaze.exe cmd/doodle/main.go
env CGO_ENABLED="1" CC="/usr/bin/i686-w64-mingw32-gcc" \ env CGO_ENABLED="1" CC="/usr/bin/i686-w64-mingw32-gcc" \
GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \
go build $(LDFLAGS) $(BUILD_TAGS) -o bin/doodad.exe cmd/doodad/main.go go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go
# `make mingw-free` for Windows binary in free mode. # `make mingw-free` for Windows binary in free mode.
.PHONY: mingw-free .PHONY: mingw-free
@ -95,7 +95,7 @@ mingw-free:
go build $(LDFLAGS_W) -o bin/sketchymaze.exe cmd/doodle/main.go go build $(LDFLAGS_W) -o bin/sketchymaze.exe cmd/doodle/main.go
env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \ env CGO_ENABLED="1" CC="/usr/bin/x86_64-w64-mingw32-gcc" \
GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \ GOOS="windows" CGO_LDFLAGS="-lmingw32 -lSDL2" CGO_CFLAGS="-D_REENTRANT" \
go build $(LDFLAGS) -o bin/doodad.exe cmd/doodad/main.go go build $(LDFLAGS) -tags=doodad -o bin/doodad.exe cmd/doodad/main.go
# `make release` runs the release.sh script, must be run # `make release` runs the release.sh script, must be run
# after `make dist` # after `make dist`

View File

@ -58,6 +58,7 @@ Some to start with:
* [Building](Building.md) the game (tl;dr. run bootstrap.py) * [Building](Building.md) the game (tl;dr. run bootstrap.py)
* [Tour of the Code](docs/Tour%20of%20the%20Code.md) * [Tour of the Code](docs/Tour%20of%20the%20Code.md)
* [Evolution of File Formats](docs/Evolution%20of%20File%20Formats.md)
# Keybindings # Keybindings

View File

@ -1,3 +1,6 @@
//go:build !doodad
// +build !doodad
// Package assets gets us off go-bindata by using Go 1.16 embed support. // Package assets gets us off go-bindata by using Go 1.16 embed support.
// //
// For Go 1.16 embed, this source file had to live inside the assets/ folder // For Go 1.16 embed, this source file had to live inside the assets/ folder

32
assets/assets_omitted.go Normal file
View File

@ -0,0 +1,32 @@
//go:build doodad
// +build doodad
// Dummy version of assets_embed.go that doesn't embed any files.
// For the `doodad` tool.
package assets
import (
"embed"
"errors"
)
var Embedded embed.FS
var errNotEmbedded = errors.New("assets not embedded")
// AssetDir returns the list of embedded files at the directory name.
// In the doodad build no assets are embedded, so this always reports
// errNotEmbedded with an empty listing.
func AssetDir(name string) ([]string, error) {
	var listing []string
	return listing, errNotEmbedded
}
// Asset returns the byte data of an embedded asset.
// In the doodad build no assets are embedded, so this always reports
// errNotEmbedded with no data.
func Asset(name string) ([]byte, error) {
	var data []byte
	return data, errNotEmbedded
}
// AssetNames dumps the names of all embedded assets,
// with their legacy "assets/" prefix from go-bindata.
// The doodad build embeds nothing, so the list is always empty (nil).
func AssetNames() []string {
	var names []string
	return names
}

View File

@ -21,27 +21,10 @@ func init() {
Usage: "load and re-save a level or doodad file to migrate to newer file format versions", Usage: "load and re-save a level or doodad file to migrate to newer file format versions",
ArgsUsage: "<.level or .doodad>", ArgsUsage: "<.level or .doodad>",
Flags: []cli.Flag{ Flags: []cli.Flag{
&cli.BoolFlag{
Name: "actors",
Usage: "print verbose actor data in Level files",
},
&cli.BoolFlag{
Name: "chunks",
Usage: "print verbose data about all the pixel chunks in a file",
},
&cli.BoolFlag{
Name: "script",
Usage: "print the script from a doodad file and exit",
},
&cli.StringFlag{ &cli.StringFlag{
Name: "attachment", Name: "output",
Aliases: []string{"a"}, Aliases: []string{"o"},
Usage: "print the contents of the attached filename to terminal", Usage: "write to a different file than the input",
},
&cli.BoolFlag{
Name: "verbose",
Aliases: []string{"v"},
Usage: "print verbose output (all verbose flags enabled)",
}, },
}, },
Action: func(c *cli.Context) error { Action: func(c *cli.Context) error {
@ -84,6 +67,18 @@ func resaveLevel(c *cli.Context, filename string) error {
log.Info("Loaded level from file: %s", filename) log.Info("Loaded level from file: %s", filename)
log.Info("Last saved game version: %s", lvl.GameVersion) log.Info("Last saved game version: %s", lvl.GameVersion)
// Different output filename?
if output := c.String("output"); output != "" {
log.Info("Output will be saved to: %s", output)
filename = output
}
if err := lvl.Vacuum(); err != nil {
log.Error("Vacuum error: %s", err)
} else {
log.Info("Run vacuum on level file.")
}
log.Info("Saving back to disk") log.Info("Saving back to disk")
if err := lvl.WriteJSON(filename); err != nil { if err := lvl.WriteJSON(filename); err != nil {
return fmt.Errorf("couldn't write %s: %s", filename, err) return fmt.Errorf("couldn't write %s: %s", filename, err)
@ -100,6 +95,18 @@ func resaveDoodad(c *cli.Context, filename string) error {
log.Info("Loaded doodad from file: %s", filename) log.Info("Loaded doodad from file: %s", filename)
log.Info("Last saved game version: %s", dd.GameVersion) log.Info("Last saved game version: %s", dd.GameVersion)
// Different output filename?
if output := c.String("output"); output != "" {
log.Info("Output will be saved to: %s", output)
filename = output
}
if err := dd.Vacuum(); err != nil {
log.Error("Vacuum error: %s", err)
} else {
log.Info("Run vacuum on doodad file.")
}
log.Info("Saving back to disk") log.Info("Saving back to disk")
if err := dd.WriteJSON(filename); err != nil { if err := dd.WriteJSON(filename); err != nil {
return fmt.Errorf("couldn't write %s: %s", filename, err) return fmt.Errorf("couldn't write %s: %s", filename, err)

View File

@ -1,6 +1,8 @@
package commands package commands
import ( import (
"bytes"
"encoding/binary"
"fmt" "fmt"
"path/filepath" "path/filepath"
"sort" "sort"
@ -9,6 +11,7 @@ import (
"git.kirsle.net/SketchyMaze/doodle/pkg/doodads" "git.kirsle.net/SketchyMaze/doodle/pkg/doodads"
"git.kirsle.net/SketchyMaze/doodle/pkg/enum" "git.kirsle.net/SketchyMaze/doodle/pkg/enum"
"git.kirsle.net/SketchyMaze/doodle/pkg/level" "git.kirsle.net/SketchyMaze/doodle/pkg/level"
"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
"git.kirsle.net/SketchyMaze/doodle/pkg/log" "git.kirsle.net/SketchyMaze/doodle/pkg/log"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
) )
@ -44,6 +47,14 @@ func init() {
Aliases: []string{"v"}, Aliases: []string{"v"},
Usage: "print verbose output (all verbose flags enabled)", Usage: "print verbose output (all verbose flags enabled)",
}, },
&cli.BoolFlag{
Name: "visualize-rle",
Usage: "visually dump RLE encoded chunks to the terminal (VERY noisy for large drawings!)",
},
&cli.StringFlag{
Name: "chunk",
Usage: "specific chunk coordinate; when debugging chunks, only show this chunk (example: 2,-1)",
},
}, },
Action: func(c *cli.Context) error { Action: func(c *cli.Context) error {
if c.NArg() < 1 { if c.NArg() < 1 {
@ -263,6 +274,10 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
chunkSize = int(ch.Size) chunkSize = int(ch.Size)
width = worldSize.W - worldSize.X width = worldSize.W - worldSize.X
height = worldSize.H - worldSize.Y height = worldSize.H - worldSize.Y
// Chunk debugging CLI options.
visualize = c.Bool("visualize-rle")
specificChunk = c.String("chunk")
) )
fmt.Println("Chunks:") fmt.Println("Chunks:")
fmt.Printf(" Pixels Per Chunk: %d^2\n", ch.Size) fmt.Printf(" Pixels Per Chunk: %d^2\n", ch.Size)
@ -278,7 +293,18 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
// Verbose chunk information. // Verbose chunk information.
if c.Bool("chunks") || c.Bool("verbose") { if c.Bool("chunks") || c.Bool("verbose") {
fmt.Println(" Chunk Details:") fmt.Println(" Chunk Details:")
for point, chunk := range ch.Chunks { for point := range ch.IterChunks() {
// Debugging specific chunk coordinate?
if specificChunk != "" && point.String() != specificChunk {
log.Warn("Skip chunk %s: not the specific chunk you're looking for", point)
continue
}
chunk, ok := ch.GetChunk(point)
if !ok {
continue
}
fmt.Printf(" - Coord: %s\n", point) fmt.Printf(" - Coord: %s\n", point)
fmt.Printf(" Type: %s\n", chunkTypeToName(chunk.Type)) fmt.Printf(" Type: %s\n", chunkTypeToName(chunk.Type))
fmt.Printf(" Range: (%d,%d) ... (%d,%d)\n", fmt.Printf(" Range: (%d,%d) ... (%d,%d)\n",
@ -287,6 +313,33 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
(int(point.X)*chunkSize)+chunkSize, (int(point.X)*chunkSize)+chunkSize,
(int(point.Y)*chunkSize)+chunkSize, (int(point.Y)*chunkSize)+chunkSize,
) )
fmt.Printf(" Usage: %f (%d len of %d)\n", chunk.Usage(), chunk.Len(), chunkSize*chunkSize)
// Visualize the RLE encoded chunks?
if visualize && chunk.Type == level.RLEType {
ext, bin, err := ch.RawChunkFromZipfile(point)
if err != nil {
log.Error(err.Error())
continue
} else if ext != ".bin" {
log.Error("Unexpected filetype for RLE compressed chunk (expected .bin, got %s)", ext)
continue
}
// Read off the first byte (chunk type)
var reader = bytes.NewBuffer(bin)
binary.ReadUvarint(reader)
bin = reader.Bytes()
grid, err := rle.NewGrid(chunkSize)
if err != nil {
log.Error(err.Error())
continue
}
grid.Decompress(bin)
fmt.Println(grid.Visualize())
}
} }
} else { } else {
fmt.Println(" Use -chunks or -verbose to serialize Chunks") fmt.Println(" Use -chunks or -verbose to serialize Chunks")
@ -298,8 +351,8 @@ func chunkTypeToName(v uint64) string {
switch v { switch v {
case level.MapType: case level.MapType:
return "map" return "map"
case level.GridType: case level.RLEType:
return "grid" return "rle map"
default: default:
return fmt.Sprintf("type %d", v) return fmt.Sprintf("type %d", v)
} }

View File

@ -101,6 +101,11 @@ func main() {
Name: "chdir", Name: "chdir",
Usage: "working directory for the game's runtime package", Usage: "working directory for the game's runtime package",
}, },
&cli.BoolFlag{
Name: "new",
Aliases: []string{"n"},
Usage: "open immediately to the level editor",
},
&cli.BoolFlag{ &cli.BoolFlag{
Name: "edit", Name: "edit",
Aliases: []string{"e"}, Aliases: []string{"e"},
@ -248,6 +253,8 @@ func main() {
if c.Bool("guitest") { if c.Bool("guitest") {
game.Goto(&doodle.GUITestScene{}) game.Goto(&doodle.GUITestScene{})
} else if c.Bool("new") {
game.NewMap()
} else if filename != "" { } else if filename != "" {
if c.Bool("edit") { if c.Bool("edit") {
game.EditFile(filename) game.EditFile(filename)

View File

@ -0,0 +1,488 @@
# Evolution of File Formats
This document will cover the evolution of the game's primary file formats (Level and Doodad drawings): how the on-disk format has changed over time to better compress/optimize the drawing data, and how the game continued to maintain backwards compatibility.
The game, so far, is always able to _read_ levels and doodads created by older versions (all the way back to the very first alpha build!) and, upon saving them, will convert the file format to the latest standard in order to optimize and reduce disk space usage.
The game can generally be configured (by editing feature flag constants) to _output_ drawings in the various legacy formats as well. Between the v1 JSON and v2 Gzip-JSON formats, the game is able to translate back and forth. From v3 Zipfiles onwards, back-migrating drawing files is not a supported operation - it can always save drawings _forward_ but code is not in place to e.g. take Zipfile members and put them back into the root struct to revert to a classic JSON-style file.
Table of Contents:
- [Evolution of File Formats](#evolution-of-file-formats)
- [General Design](#general-design)
- [A common file format between Levels and Doodads](#a-common-file-format-between-levels-and-doodads)
- [Chunks and the Chunker](#chunks-and-the-chunker)
- [Chunk Accessors](#chunk-accessors)
- [File Format Versions](#file-format-versions)
- [v1: Simple JSON Format](#v1-simple-json-format)
- [v2: GZip compressed JSON](#v2-gzip-compressed-json)
- [v3: Zip archives with external chunks](#v3-zip-archives-with-external-chunks)
- [Structure of the Zipfile](#structure-of-the-zipfile)
- [How I made it backwards compatible](#how-i-made-it-backwards-compatible)
- [Migrating the ZIP file on every Save](#migrating-the-zip-file-on-every-save)
- [The old Chunks and Files become staging areas](#the-old-chunks-and-files-become-staging-areas)
- [v3.1: A binary chunk file format](#v31-a-binary-chunk-file-format)
- [v4: Run Length Encoding MapAccessor](#v4-run-length-encoding-mapaccessor)
- [File Size Savings](#file-size-savings)
- [First Quest](#first-quest)
- [Tutorial Levels](#tutorial-levels)
- [Azulian Tag](#azulian-tag)
- [Built-in Doodads](#built-in-doodads)
- [Game Binary Size](#game-binary-size)
# General Design
Some thought and planning went into this in the very beginning. This section covers the general design goals of Levels & Doodads, how their actual pixel data is managed (in chunks), and how I left it open-ended to experiment with different chunk accessor algorithms in the future.
## A common file format between Levels and Doodads
Under the hood, the file format for .level and .doodad files are extremely similar. They share a handful of properties in common at their root data structure:
* Version (of the file format - still at version 1!)
* Game Version (that last saved this file)
* Title, Author, common metadata
* Attached file storage such as custom wallpapers
Both levels and doodads also have a Chunker somewhere that stores the actual pixels-on-screen drawing data for them.
Both file types have evolved together, and when optimizations are made for e.g. Level files, the Doodads automatically benefit too for sharing a lot of code in common.
## Chunks and the Chunker
The drawing data itself (pixels on screen) from the beginning was decided to be split into Chunks, and each Chunk would manage the pixels for its part of the overall drawing. This can enable arbitrarily large drawing sizes for levels and doodads, with theoretically "infinite" boundaries (within computer integer bounds).
Each Level or Doodad file will have one or more Chunkers. The Chunker itself stores the common properties for the drawing (like the Chunk Size, e.g., 128 square pixels by default), and it manages translating from "world coordinates" of your drawing into "chunk coordinates", so it knows which Chunk is responsible for that part of the drawing.
For an arbitrary world coordinate (like 900,-290) the Chunker can divide it by the Chunk Size of 128 and find that chunk coordinate (7,-2) is responsible for that chunk and asks it for its pixels.
Levels currently only have one Chunker, but Doodads have many (one for each frame of animation they store).
## Chunk Accessors
There is support from the beginning for each Chunk to manage its own data in any way it wants to. For example, a Chunk that is completely filled by one color of pixel could store its information _much_ more succinctly than a chunk made up of very sparse lines, where each pixel coordinate needs to be accounted for.
The first type of Chunk accessor was the **MapAccessor**, which stored the X,Y coordinates of each pixel mapped to their Palette color index (see the example below, in [v1: Simple JSON Format](#v1-simple-json-format)).
It was planned that future accessors would be added, such as a **GridAccessor** for very densely packed chunks (storing pixels in a 2D array), with the Chunker automatically deciding which format is optimal for encoding — but this was never implemented.
From the game's first alpha (0.0.9, July 9 2019) through version 0.14.0 (May 4 2024), the MapAccessor was the only one ever implemented.
# File Format Versions
## v1: Simple JSON Format
At first, levels were just saved as simple JSON files (whitespace compressed only), which when pretty printed (and with comment annotations added) looked like this:
```javascript
{
// Common properties between levels and doodads
"version": 1, // json schema version, still at "1" today!
"gameVersion": "0.0.10-alpha",
"title": "Alpha",
"author": "Noah P",
"locked": false, // read locked/won't open in editor
"files": null, // attached files
"passwd": "", // level password (never used)
// The drawing data itself, divided into chunks.
"chunks": {
"size": 128,
"chunks": {
// Chunk coordinate
"0,0": {
"type": 0, // 0 = MapAccessor chunk type
"data": {
// Each pixel coordinate mapped
// to a palette index number...
"69,32": 0,
"69,33": 0,
"70,34": 0,
}
}
}
},
"palette": {
"swatches": [
// indexed color palette for the drawing
{
"name": "solid",
"color": "#000000",
"solid": true
},
{
"name": "decoration",
"color": "#999999"
},
{
"name": "fire",
"color": "#ff0000",
"fire": true
},
{
"name": "water",
"color": "#0000ff",
"water": true
}
]
},
"pageType": 2, // 2 = Bounded LevelType
"boundedWidth": 2550,
"boundedHeight": 3300,
"wallpaper": "notebook.png",
"actors": {
// doodads in your level, by their instanced ID
"4d193308-a52d-4153-a10d-a010445dd47b": {
"filename": "button.doodad",
"point": "154,74",
"links": [
// linked actor IDs
"8d501581-0904-4dfb-a326-57330b2484be"
]
},
"8d501581-0904-4dfb-a326-57330b2484be": {
"filename": "electric-door.doodad",
"point": "320,74",
"links": [
"4d193308-a52d-4153-a10d-a010445dd47b"
]
}
}
}
```
A **doodad file** was very similar but had some other relevant properties in its JSON format, such as:
* Their Size (dimensions)
* JavaScript source code
* Hitbox, Tags/Options
A doodad file has one Palette like a level, but it has multiple chunkers (one for each layer of the doodad; which are how you store frames for animation or state changes).
What levels and doodads have in common is file storage for attaching files to them (such as custom wallpapers for a level, or sound effects for a doodad), with their binary data encoded to base64.
In the first iteration of the file format, _all_ of this was encoded into the single JSON file on disk!
For densely packed levels, though, the JSON file got really large quickly, even with the whitespace removed.
## v2: GZip compressed JSON
The second iteration was to basically add gzip compression to the level files, which slashed their file size considerably.
How I made it backwards compatible:
The game is able to open a ".level" file which is _either_ a straight JSON file from older versions of the game, or the new gzip compressed format.
When **opening** a file, it:
1. Checks if the first byte is the ASCII character `{`, and will parse it as legacy v1 JSON format.
2. Checks if the file's opening bytes are instead a gzip header (hex `1f8b`), and will load it from GZip (v2 file format).
The GZip reader is basically a wrapper that decodes a JSON file with compression:
```go
// pkg/level/fmt_json.go
// FromGzip deserializes a gzip compressed level JSON.
func FromGzip(data []byte) (*Level, error) {
// This function works, do not touch.
var (
level = New()
buf = bytes.NewBuffer(data)
reader *gzip.Reader
decoder *json.Decoder
)
reader, err := gzip.NewReader(buf)
if err != nil {
return nil, err
}
decoder = json.NewDecoder(reader)
decoder.Decode(level)
return level, nil
}
```
## v3: Zip archives with external chunks
Very large or dense levels were resulting in enormous file sizes even with gzip compression, and they were taking a long time to load from disk since _all_ chunk data was still in one file!
A loading screen feature was added to Sketchy Maze around this time because big levels could take _seconds_ to load.
The level format was reworked again and now the .level file is basically a Zip archive with member files within. Most importantly, this enabled the drawing's chunks to be kicked out into separate files so we could manage "loading" the level more efficiently.
### Structure of the Zipfile
A level zipfile is laid out like so:
```
/
level.json
assets/
screenshots/
large.png
chunks/
0/
-1,0.json
-1,1.json
-2,0.json
```
The `level.json` file contains most of the basic metadata from the old file format, except the chunks are evicted and stored in separate JSON files by their chunk coordinate. Doodads will have a `doodad.json` file here instead.
Notice the directory name **0/** holding the chunks for a level file: this zero is a layer ID to accommodate Doodads which share a similar file format. Levels only have one Layer (for now), so the directory name is always zero. Doodads will have each of their layers enumerated from 0, 1, 2, ...
Attached assets such as wallpapers or embedded custom doodads would be regular ZIP file members under the assets/ folder.
The level.json file as of v0.14.0 looks a bit like this:
```json
{
"version": 1,
"gameVersion": "0.14.0",
"title": "The Castle",
"author": "Noah P",
"locked": false,
"files": {}, // files are evicted to the assets/ folder
"passwd": "",
"uuid": "18f0f734-d7ad-4b10-be6d-f40d31334816",
"rules": {
"difficulty": 0
},
"chunks": {
"size": 128,
"chunks": {} // chunks are evicted as well
},
"palette": {
"swatches": [
{
"name": "grass",
"color": "#009900",
"pattern": "noise.png",
"solid": true
},
{
"name": "fire",
"color": "#ff0000",
"pattern": "marker.png",
"fire": true
}
]
},
"pageType": 0,
"boundedWidth": 2550,
"boundedHeight": 3300,
"wallpaper": "graph.png",
"scroll": "-3072,51",
"actors": {
"0fac06dc-fe1e-11eb-9dfc-9cb6d0c2aa8b": {
"filename": "key-blue.doodad",
"point": "4403,239"
},
"10554b0f-fe18-11eb-9dfc-9cb6d0c2aa8b": {
"filename": "door-blue.doodad",
"point": "3909,344"
},
},
"saveDoodads": false,
"saveBuiltins": false
}
```
### How I made it backwards compatible
The function that reads Levels and Doodads from disk continues to check its headers:
1. If the first byte is a `{` it's a legacy old drawing and is parsed as classic JSON (format v1)
2. If the first bytes are a GZip header, try loading it as a gz compressed JSON file (format v2)
3. If instead the header is a ZIP archive, open it and look for the `level.json` or `doodad.json` for the expected file you're opening.
It was able to continue loading drawings from the _very_ earliest alpha of the game and can still load them today.
### Migrating the ZIP file on every Save
The upgrade path to save a legacy drawing in the modern ZIP format was very straightforward:
In the root Level struct (what level.json decodes into):
* Deprecate the "files" and "chunks" fields since these are evicted out into separate ZIP file members, so they should be empty in the new file format.
* So on level save, if the Level has any data in these fields (meaning you had loaded it from a legacy file format), evict those fields to clear them out when generating the new ZIP file.
When an opened level _was_ a ZIP file to begin with, a pointer to the Zipfile handle is kept accessible to make saving those levels more efficient. When a level _wasn't_ a ZIP file, I basically create a new one, write the level.json and flush out its embedded files/chunks to their correct places.
So, when you've opened a ZIP file level and you re-save it, the process is:
1. First, copy any "cold storage" chunks from the old Zipfile to the new one.
* These are the chunks not actively in memory (see the next section about [staging areas](#the-old-chunks-and-files-become-staging-areas))
2. Then, flush out any data in the legacy "chunks" or "files" sections to external zipfile members.
* This is the same logic for migrating an old gzip-json level, where _all_ its data was in these places...
* As well as flushing out recently edited chunks or recently attached files (per the next section).
### The old Chunks and Files become staging areas
In the base Level struct, the old keys where Files and Chunks used to be stored have now become the staging area for "warmed up" chunks or recently attached files.
For example: the level.json file in the ZIP stores no data in these fields, and chunks are stored as separate members. Whenever the game **loads** a chunk from ZIP, it will cache it in the old Chunks structure so it has it warmed up and ready to use.
When **playing** a level: there is a chunk loading/unloading algorithm that balances memory use during gameplay. Chunks which are currently on screen may be fetched from the ZIP file and cached in the legacy Chunks structure. The game will track which chunks are accessed on the current game tick (as well as the previous couple of ticks).
If a chunk has not been accessed in a few game ticks, it is destroyed and removed from the legacy Chunks structure (along with its SDL2 texture being cleaned up, etc.); if the player scrolls the chunk back on screen, it is recalled from the ZIP file and cached again.
When **editing** a level in the editor, any chunk that receives a local modification is also stored in the old Chunks structure, and is kept there until the next save: when all the loaded chunks are flushed out to ZIP files. Chunks with modifications are NOT flushed by the auto-loading/unloading algorithm so their changes don't get lost.
## v3.1: A binary chunk file format
At this point: there is still only one Chunk accessor (the MapAccessor) and its JSON files in the zip file still looked like (if pretty printed):
```javascript
{
"type": 0,
"data": {
"69,32": 1, // coordinate to palette index
"69,33": 2,
"70,34": 0,
}
}
```
The next iteration was to compress these down into a binary format to shrink them further by removing the extra JSON characters (quotes, brackets, etc.) and the ASCII human readable digits.
In the ZIP file: the legacy chunks will have their .json file extension but the new binary format stores them into .bin files; so the game is able to load old and new levels by checking the file types available for their chunks.
The binary format makes use of variable-length integers provided by Go's encoding/binary package. This is the same VarInt type from Protocol Buffers: small numbers encode to a few number of bytes, and large numbers may use additional bytes.
* The **first** Uvarint in the binary format is the chunk type (0 = MapAccessor)
* The remaining data is arbitrary and up to that chunk accessor to handle how it wants.
For the MapAccessor: the remaining binary data is a repeating stream of three varints:
1. X coordinate
2. Y coordinate
3. Palette index number
For migrating old JSON chunks into binary format: on save it will always output in the .bin format (by calling the chunk accessor's MarshalBinary method), but on reading is able to handle both .bin and legacy .json.
## v4: Run Length Encoding MapAccessor
After the release of v0.14.0 of the game, a new chunk accessor has _finally_ been added to the game: the **RLEAccessor**.
The RLEAccessor is functionally identical to the MapAccessor, in that (in working memory) it stores a hash map of world coordinates to the palette color. But where the RLEAccessor is different is with the **on disk format** of how it encodes its chunks.
The on-disk format uses binary (.bin) only, and compresses the chunk's pixel data using Run Length Encoding (RLE). The algorithm is basically:
* When **compressing** your chunk data to save on disk:
* It creates a 2D grid array of integers in order to rasterize a complete bitmap of the chunk.
* For a chunk size of 128, this is a 128x128 2D array.
* The values are your palette index numbers (0 to N)
* "Null" colors that are blank in the chunk use the value 0xFFFF.
* Note: the gameplay logic enforces only 256 colors per level palette, but theoretically 65,534 colors could be supported before the "null" color would collide.
* It then serializes the 2D bitmap using RLE with a series of packed Uvarints:
1. The palette color to set
2. The number of pixels to repeat that palette color.
* When **decompressing** the RLE encoded data, the process is reversed:
* It creates a 2D grid of your square chunk size again (all nulls)
* Then it decompresses the RLE encoded stream of Uvarints, filling out the grid from the top-left to bottom-right corner.
* Finally, it scans the grid to find non-null colors to populate its regular MapAccessor struct of points-to-colors.
For a simple example: if a chunk consists 100% of the same color on all 128x128 pixels, the compressed RLE stream contains only 3 or 4 bytes on disk:
1. The palette index number
2. The repeat number (16,384 for a 128x128 chunk grid)
For **migrating MapAccessors to RLEAccessors:**
The game is still able to read legacy MapAccessor chunks, and when **saving** a drawing back to disk, it fans out and checks all your level chunks if they need to be optimized:
* If their chunk type is a MapAccessor, copy the underlying map data into an RLEAccessor.
* Then when saving to disk, the RLEAccessor MarshalBinary() func will create the .bin file in the updated format on disk.
### File Size Savings
On average the RLE encoding slashes file sizes by over 90% for most levels, especially densely packed levels with lots of large colored areas.
Here are examples from the game's built-in level packs.
See [RLE Encoding for Levels](./RLE%20Encoding%20for%20Levels.md) for more breakdown of these numbers.
#### First Quest
| Filename | Orig Size | New Size | Reduction |
|--------------------|-----------|----------|-----------|
| Boat.level | 4.3M | 292K | 93% |
| Castle.level | 5.6M | 241K | 95% |
| Desert-1of2.level | 4.4M | 248K | 94% |
| Desert-2of2.level | 3.2M | 290K | 91% |
| Jungle.level | 11M | 581K | 94% |
| Shapeshifter.level | 22M | 263K | 98% |
| Thief 1.level | 538K | 193K | 64% |
The combined levelpack ZIP file itself:
* Filename: builtin-100-FirstQuest.levelpack
* Original: 50M (52369408)
* New size: 1.8M (1838542) 96%
The most notable improvement is Shapeshifter.level, which features **large** chunks of solid color and it compressed by 98% with the RLE encoding!
#### Tutorial Levels
Many of the Tutorial levels are made of sparsely drawn "line art" rather than solid colored areas, so the reduction in filesize is closer to ~60% instead of 90%+
| Filename | Orig Size | New Size | Reduction |
|--------------------|-----------|----------|-----------|
| Tutorial 1.level | 186K | 111K | 40% |
| Tutorial 2.level | 680K | 229K | 66% |
| Tutorial 3.level | 409K | 148K | 64% |
| Tutorial 4.level | 901K | 376K | 58% |
| Tutorial 5.level | 3M | 645K | 78% |
| Zoo.level | 2.8M | 226K | 92% |
The levelpack ZIP:
* Filename: builtin-Tutorial.levelpack
* Original: 7.8M (8119658)
* New size: 1.6M (1650381) 79%
#### Azulian Tag
| Filename | Orig Size | New Size | Reduction |
|---------------------------|-----------|----------|-----------|
| AzulianTag-Forest.level | 17M | 312K | 98% |
| AzulianTag-Night.level | 702K | 145K | 79% |
| AzulianTag-Tutorial.level | 3.4M | 185K | 94% |
The levelpack ZIP:
* Filename: builtin-200-AzulianTag.levelpack
* Original: 21M (21824441)
* New size: 525K (537345) 97%
#### Built-in Doodads
The RLE compression also improved the file sizes of the game's built-in doodads. For a random spot check of some:
| Filename | Orig Size | New Size |
|--------------------------|-----------|----------|
| anvil.doodad | 2.7K | 1.3K |
| azu-blu.doodad | 8.1K | 5.2K |
| azu-red.doodad | 8.1K | 5.2K |
| azu-white.doodad | 8.1K | 5.2K |
| box.doodad | 29K | 4.1K |
| boy.doodad | 30K | 8.1K |
| crumbly-floor.doodad | 15K | 3.3K |
| door-blue.doodad | 18K | 2.7K |
| electric-trapdoor.doodad | 9.5K | 2.8K |
Total file size of all builtin doodads:
* Original: 576.8 KiB
* New: 153.7 KiB (73% reduction)
#### Game Binary Size
The game binary embeds its built-in doodads and levelpacks directly, and so this optimization has also slashed the overall size of the game binary too:
* Filename: sketchymaze
* Original: 105M
* New size: 30M, 71% smaller

View File

@ -0,0 +1,116 @@
# RLE Encoding for Levels
This documents some of the file size savings of the game's built-in levelpacks and doodads once the file format was migrated to use Run Length Encoding (RLE) for drawing chunks.
See [Evolution of File Formats](./Evolution%20of%20File%20Formats.md) for a history of the game's file formats.
# Levels
The file sizes of the levels themselves:
## First Quest
| Filename | Orig Size | New Size | Reduction |
|--------------------|-----------|----------|-----------|
| Boat.level | 4.3M | 292K | 93% |
| Castle.level | 5.6M | 241K | 95% |
| Desert-1of2.level | 4.4M | 248K | 94% |
| Desert-2of2.level | 3.2M | 290K | 91% |
| Jungle.level | 11M | 581K | 94% |
| Shapeshifter.level | 22M | 263K | 98% |
| Thief 1.level | 538K | 193K | 64% |
In raw bytes:
| Filename | Orig Size | New Size |
|--------------------|-----------|----------|
| Boat.level | 4494184 | 298943 |
| Castle.level | 5854222 | 245872 |
| Desert-1of2.level | 4589382 | 253768 |
| Desert-2of2.level | 3310784 | 296681 |
| Jungle.level | 10928779 | 594601 |
| Shapeshifter.level | 22823811 | 269307 |
| Thief 1.level | 550579 | 196731 |
The levelpack ZIP:
* Filename: builtin-100-FirstQuest.levelpack
* Original: 50M (52369408)
* New size: 1.8M (1838542) 96%
## Tutorial
| Filename | Orig Size | New Size | Reduction |
|--------------------|-----------|----------|-----------|
| Tutorial 1.level | 186K | 111K | 40% |
| Tutorial 2.level | 680K | 229K | 66% |
| Tutorial 3.level | 409K | 148K | 64% |
| Tutorial 4.level | 901K | 376K | 58% |
| Tutorial 5.level | 3M | 645K | 78% |
| Zoo.level | 2.8M | 226K | 92% |
In raw bytes:
| Filename | Orig Size | New Size |
|--------------------|-----------|----------|
| Tutorial 1.level | 190171 | 113568 |
| Tutorial 2.level | 695936 | 233880 |
| Tutorial 3.level | 418490 | 150565 |
| Tutorial 4.level | 921781 | 384775 |
| Tutorial 5.level | 3059902 | 659487 |
| Zoo.level | 2925633 | 230712 |
The levelpack ZIP:
* Filename: builtin-Tutorial.levelpack
* Original: 7.8M (8119658)
* New size: 1.6M (1650381) 79%
## Azulian Tag
| Filename | Orig Size | New Size | Reduction |
|---------------------------|-----------|----------|-----------|
| AzulianTag-Forest.level | 17M | 312K | 98% |
| AzulianTag-Night.level | 702K | 145K | 79% |
| AzulianTag-Tutorial.level | 3.4M | 185K | 94% |
In raw bytes:
| Filename | Orig Size | New Size |
|---------------------------|-----------|----------|
| AzulianTag-Forest.level | 17662031 | 318547 |
| AzulianTag-Night.level | 718345 | 147612 |
| AzulianTag-Tutorial.level | 3508093 | 189310 |
The levelpack ZIP:
* Filename: builtin-200-AzulianTag.levelpack
* Original: 21M (21824441)
* New size: 525K (537345) 97%
# Doodads
Spot check of random doodad filesize changes:
| Filename | Orig Size | New Size |
|--------------------------|-----------|----------|
| anvil.doodad | 2.7K | 1.3K |
| azu-blu.doodad | 8.1K | 5.2K |
| azu-red.doodad | 8.1K | 5.2K |
| azu-white.doodad | 8.1K | 5.2K |
| box.doodad | 29K | 4.1K |
| boy.doodad | 30K | 8.1K |
| crumbly-floor.doodad | 15K | 3.3K |
| door-blue.doodad | 18K | 2.7K |
| electric-trapdoor.doodad | 9.5K | 2.8K |
Total file size of all builtin doodads:
* Original: 576.8 KiB
* New: 153.7 KiB (73% reduction)
# Game Binary
* Filename: sketchymaze
* Original: 105M
* New size: 30M, 71% smaller

View File

@ -28,6 +28,12 @@ const (
// If you set both flags to false, level zipfiles will use the classic // If you set both flags to false, level zipfiles will use the classic
// json chunk format as before on save. // json chunk format as before on save.
BinaryChunkerEnabled = true BinaryChunkerEnabled = true
// Enable "v3" Run-Length Encoding for level chunker.
//
// This only supports Zipfile levels and will use the ".bin" format
// enabled by the previous setting.
RLEBinaryChunkerEnabled = true
) )
// Feature Flags to turn on/off experimental content. // Feature Flags to turn on/off experimental content.

View File

@ -9,7 +9,7 @@ import (
const ( const (
AppName = "Sketchy Maze" AppName = "Sketchy Maze"
Summary = "A drawing-based maze game" Summary = "A drawing-based maze game"
Version = "0.14.0" Version = "0.14.1"
Website = "https://www.sketchymaze.com" Website = "https://www.sketchymaze.com"
Copyright = "2023 Noah Petherbridge" Copyright = "2023 Noah Petherbridge"
Byline = "a game by Noah Petherbridge." Byline = "a game by Noah Petherbridge."

View File

@ -0,0 +1,14 @@
package doodads
// Vacuum runs any maintenance or migration tasks for the doodad at time of save.
//
// It gives each layer's Chunker the opportunity to migrate its internal
// data structures (e.g. upgrading chunk accessor types) to optimize how
// the drawing is stored on disk.
func (m *Doodad) Vacuum() error {
	// Let each layer's Chunker optimize its accessor types.
	for _, lyr := range m.Layers {
		lyr.Chunker.OptimizeChunkerAccessors()
	}
	return nil
}

View File

@ -164,6 +164,11 @@ func (d *Doodad) WriteFile(filename string) error {
d.Version = 1 d.Version = 1
d.GameVersion = branding.Version d.GameVersion = branding.Version
// Maintenance functions, clean up cruft before save.
if err := d.Vacuum(); err != nil {
log.Error("Vacuum level %s: %s", filename, err)
}
bin, err := d.ToJSON() bin, err := d.ToJSON()
if err != nil { if err != nil {
return err return err

View File

@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"encoding/binary" "encoding/binary"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"image" "image"
"math" "math"
@ -19,9 +20,12 @@ import (
// Types of chunks. // Types of chunks.
const ( const (
MapType uint64 = iota MapType uint64 = iota
GridType RLEType
) )
// Default chunk type for newly created chunks (was MapType).
const DefaultChunkType = RLEType
// Chunk holds a single portion of the pixel canvas. // Chunk holds a single portion of the pixel canvas.
type Chunk struct { type Chunk struct {
Type uint64 // map vs. 2D array. Type uint64 // map vs. 2D array.
@ -62,16 +66,15 @@ type Accessor interface {
Len() int Len() int
MarshalBinary() ([]byte, error) MarshalBinary() ([]byte, error)
UnmarshalBinary([]byte) error UnmarshalBinary([]byte) error
MarshalJSON() ([]byte, error)
UnmarshalJSON([]byte) error
} }
// NewChunk creates a new chunk. // NewChunk creates a new chunk.
func NewChunk() *Chunk { func NewChunk() *Chunk {
return &Chunk{ var c = &Chunk{
Type: MapType, Type: RLEType,
Accessor: NewMapAccessor(),
} }
c.Accessor = NewRLEAccessor(c)
return c
} }
// Texture will return a cached texture for the rendering engine for this // Texture will return a cached texture for the rendering engine for this
@ -326,31 +329,18 @@ func (c *Chunk) SizePositive() render.Rect {
} }
// Usage returns the percent of free space vs. allocated pixels in the chunk. // Usage returns the percent of free space vs. allocated pixels in the chunk.
func (c *Chunk) Usage(size int) float64 { func (c *Chunk) Usage() float64 {
return float64(c.Len()) / float64(size) size := float64(c.Size)
} return float64(c.Len()) / (size * size)
// MarshalJSON writes the chunk to JSON.
//
// DEPRECATED: MarshalBinary will encode chunks to a tighter binary format.
func (c *Chunk) MarshalJSON() ([]byte, error) {
data, err := c.Accessor.MarshalJSON()
if err != nil {
return []byte{}, err
}
generic := &JSONChunk{
Type: c.Type,
Data: data,
}
b, err := json.Marshal(generic)
return b, err
} }
// UnmarshalJSON loads the chunk from JSON and uses the correct accessor to // UnmarshalJSON loads the chunk from JSON and uses the correct accessor to
// parse the inner details. // parse the inner details.
// //
// DEPRECATED in favor of binary marshalling. // DEPRECATED in favor of binary marshalling.
//
// Only supports MapAccessor chunk types, which was the only one supported
// before this function was deprecated.
func (c *Chunk) UnmarshalJSON(b []byte) error { func (c *Chunk) UnmarshalJSON(b []byte) error {
// Parse it generically so we can hand off the inner "data" object to the // Parse it generically so we can hand off the inner "data" object to the
// right accessor for unmarshalling. // right accessor for unmarshalling.
@ -362,8 +352,11 @@ func (c *Chunk) UnmarshalJSON(b []byte) error {
switch c.Type { switch c.Type {
case MapType: case MapType:
c.Accessor = NewMapAccessor() c.Accessor = NewMapAccessor(c)
return c.Accessor.UnmarshalJSON(generic.Data) if unmarshaler, ok := c.Accessor.(json.Unmarshaler); ok {
return unmarshaler.UnmarshalJSON(generic.Data)
}
return errors.New("Chunk.UnmarshalJSON: this chunk doesn't support JSON unmarshaling")
default: default:
return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type) return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type)
} }
@ -406,7 +399,12 @@ func (c *Chunk) UnmarshalBinary(b []byte) error {
// Decode the rest of the byte stream. // Decode the rest of the byte stream.
switch chunkType { switch chunkType {
case MapType: case MapType:
c.Accessor = NewMapAccessor() c.Type = MapType
c.Accessor = NewMapAccessor(c)
return c.Accessor.UnmarshalBinary(reader.Bytes())
case RLEType:
c.Type = RLEType
c.Accessor = NewRLEAccessor(c)
return c.Accessor.UnmarshalBinary(reader.Bytes()) return c.Accessor.UnmarshalBinary(reader.Bytes())
default: default:
return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type) return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type)

View File

@ -16,17 +16,24 @@ import (
// MapAccessor implements a chunk accessor by using a map of points to their // MapAccessor implements a chunk accessor by using a map of points to their
// palette indexes. This is the simplest accessor and is best for sparse chunks. // palette indexes. This is the simplest accessor and is best for sparse chunks.
type MapAccessor struct { type MapAccessor struct {
chunk *Chunk // Pointer to parent struct, for its Size and Point
grid map[render.Point]*Swatch grid map[render.Point]*Swatch
mu sync.RWMutex mu sync.RWMutex
} }
// NewMapAccessor initializes a MapAccessor. // NewMapAccessor initializes a MapAccessor.
func NewMapAccessor() *MapAccessor { func NewMapAccessor(chunk *Chunk) *MapAccessor {
return &MapAccessor{ return &MapAccessor{
chunk: chunk,
grid: map[render.Point]*Swatch{}, grid: map[render.Point]*Swatch{},
} }
} }
// Reset the MapAccessor, discarding all pixel data and restoring an
// empty grid.
//
// NOTE: this does not acquire a.mu; callers are expected to hold the
// lock themselves (e.g. RLEAccessor.UnmarshalBinary locks it before
// calling Reset and writing into the grid).
func (a *MapAccessor) Reset() {
	a.grid = map[render.Point]*Swatch{}
}
// Inflate the sparse swatches from their palette indexes. // Inflate the sparse swatches from their palette indexes.
func (a *MapAccessor) Inflate(pal *Palette) error { func (a *MapAccessor) Inflate(pal *Palette) error {
for point, swatch := range a.grid { for point, swatch := range a.grid {

130
pkg/level/chunk_rle.go Normal file
View File

@ -0,0 +1,130 @@
package level
import (
"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
"git.kirsle.net/go/render"
)
// RLEAccessor implements a chunk accessor which stores its on-disk format using
// Run Length Encoding (RLE), but in memory behaves equivalently to the MapAccessor.
//
// All in-memory pixel operations delegate to the wrapped MapAccessor; only
// the binary (un)marshalling differs, using the RLE grid format.
type RLEAccessor struct {
	chunk *Chunk // parent Chunk, for its Size and Point
	acc   *MapAccessor
}

// NewRLEAccessor initializes a RLEAccessor wrapping a fresh MapAccessor
// for the given parent chunk.
func NewRLEAccessor(chunk *Chunk) *RLEAccessor {
	return &RLEAccessor{
		chunk: chunk,
		acc:   NewMapAccessor(chunk),
	}
}

// Inflate the sparse swatches from their palette indexes.
// Delegates to the underlying MapAccessor.
func (a *RLEAccessor) Inflate(pal *Palette) error {
	return a.acc.Inflate(pal)
}

// Len returns the current size of the map, or number of pixels registered.
func (a *RLEAccessor) Len() int {
	return a.acc.Len()
}

// IterViewport returns a channel to loop over pixels in the viewport.
func (a *RLEAccessor) IterViewport(viewport render.Rect) <-chan Pixel {
	return a.acc.IterViewport(viewport)
}

// Iter returns a channel to loop over all points in this chunk.
func (a *RLEAccessor) Iter() <-chan Pixel {
	return a.acc.Iter()
}

// Get a pixel from the map. Returns an error if no pixel is set there.
func (a *RLEAccessor) Get(p render.Point) (*Swatch, error) {
	return a.acc.Get(p)
}

// Set a pixel on the map.
func (a *RLEAccessor) Set(p render.Point, sw *Swatch) error {
	return a.acc.Set(p, sw)
}

// Delete a pixel from the map.
func (a *RLEAccessor) Delete(p render.Point) error {
	return a.acc.Delete(p)
}
/*
MarshalBinary converts the chunk data to a binary representation.

This accessor uses Run Length Encoding (RLE) in its binary format. Starting
with the top-left pixel of this chunk, the binary format is a stream of bytes
formatted as such:

- UVarint for the palette index number, with 0xFFFF meaning void (no pixel)
- UVarint for the length of repetition of that palette index
*/
func (a *RLEAccessor) MarshalBinary() ([]byte, error) {
	// Flatten the chunk out into a full 2D array of all its points.
	var (
		size      = int(a.chunk.Size)
		grid, err = rle.NewGrid(size)
	)
	if err != nil {
		// Size was invalid (chunker probably not initialized).
		return nil, err
	}

	// Populate the dense 2D array of its pixels.
	for y, row := range grid {
		for x := range row {
			var (
				// Translate the grid cell (relative, within-chunk) back to
				// the absolute world coordinate the MapAccessor is keyed by.
				relative    = render.NewPoint(x, y)
				absolute    = FromRelativeCoordinate(relative, a.chunk.Point, a.chunk.Size)
				swatch, err = a.Get(absolute)
			)
			if err != nil {
				// No pixel here: leave the grid cell nil (void).
				continue
			}

			var ptr = uint64(swatch.Index())
			grid[relative.Y][relative.X] = &ptr
		}
	}

	return grid.Compress()
}
// UnmarshalBinary will decode a compressed RLEAccessor byte stream.
//
// The wrapped MapAccessor is reset and repopulated from the decompressed
// grid; its mutex is held for the duration of the rebuild (note: Reset and
// the direct grid writes below rely on this lock being held here).
func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error {
	a.acc.mu.Lock()
	defer a.acc.mu.Unlock()

	// New format: decompress the byte stream.
	// log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed))
	grid, err := rle.NewGrid(int(a.chunk.Size))
	if err != nil {
		return err
	}

	if err := grid.Decompress(compressed); err != nil {
		return err
	}

	// Load the grid into our MapAccessor, translating each grid cell's
	// relative coordinate back to the absolute world coordinate.
	a.acc.Reset()
	for y, row := range grid {
		for x, col := range row {
			if col == nil {
				// Void pixel: nothing stored at this cell.
				continue
			}
			abs := FromRelativeCoordinate(render.NewPoint(x, y), a.chunk.Point, a.chunk.Size)
			// Sparse swatch by palette index; presumably resolved to a real
			// Swatch later by Inflate — TODO confirm.
			a.acc.grid[abs] = NewSparseSwatch(int(*col))
		}
	}

	return nil
}

View File

@ -129,8 +129,10 @@ func TestChunker(t *testing.T) {
// Test the map chunk accessor. // Test the map chunk accessor.
func TestMapAccessor(t *testing.T) { func TestMapAccessor(t *testing.T) {
a := level.NewMapAccessor() var (
_ = a c = level.NewChunk()
a = level.NewMapAccessor(c)
)
// Test action types // Test action types
var ( var (
@ -242,54 +244,88 @@ func TestChunkCoordinates(t *testing.T) {
c := level.NewChunker(128) c := level.NewChunker(128)
type testCase struct { type testCase struct {
In render.Point WorldCoordinate render.Point
Expect render.Point ChunkCoordinate render.Point
RelativeCoordinate render.Point
} }
tests := []testCase{ tests := []testCase{
testCase{ testCase{
In: render.NewPoint(0, 0), WorldCoordinate: render.NewPoint(0, 0),
Expect: render.NewPoint(0, 0), ChunkCoordinate: render.NewPoint(0, 0),
RelativeCoordinate: render.NewPoint(0, 0),
}, },
testCase{ testCase{
In: render.NewPoint(128, 128), WorldCoordinate: render.NewPoint(4, 8),
Expect: render.NewPoint(0, 0), ChunkCoordinate: render.NewPoint(0, 0),
RelativeCoordinate: render.NewPoint(4, 8),
}, },
testCase{ testCase{
In: render.NewPoint(1024, 128), WorldCoordinate: render.NewPoint(128, 128),
Expect: render.NewPoint(1, 0), ChunkCoordinate: render.NewPoint(1, 1),
RelativeCoordinate: render.NewPoint(0, 0),
}, },
testCase{ testCase{
In: render.NewPoint(3600, 1228), WorldCoordinate: render.NewPoint(130, 156),
Expect: render.NewPoint(3, 1), ChunkCoordinate: render.NewPoint(1, 1),
RelativeCoordinate: render.NewPoint(2, 28),
}, },
testCase{ testCase{
In: render.NewPoint(-100, -1), WorldCoordinate: render.NewPoint(1024, 128),
Expect: render.NewPoint(-1, -1), ChunkCoordinate: render.NewPoint(8, 1),
RelativeCoordinate: render.NewPoint(0, 0),
}, },
testCase{ testCase{
In: render.NewPoint(-950, 100), WorldCoordinate: render.NewPoint(3600, 1228),
Expect: render.NewPoint(-1, 0), ChunkCoordinate: render.NewPoint(28, 9),
RelativeCoordinate: render.NewPoint(16, 76),
}, },
testCase{ testCase{
In: render.NewPoint(-1001, -856), WorldCoordinate: render.NewPoint(-100, -1),
Expect: render.NewPoint(-2, -1), ChunkCoordinate: render.NewPoint(-1, -1),
RelativeCoordinate: render.NewPoint(28, 127),
}, },
testCase{ testCase{
In: render.NewPoint(-3600, -4800), WorldCoordinate: render.NewPoint(-950, 100),
Expect: render.NewPoint(-4, -5), ChunkCoordinate: render.NewPoint(-8, 0),
RelativeCoordinate: render.NewPoint(74, 100),
},
testCase{
WorldCoordinate: render.NewPoint(-1001, -856),
ChunkCoordinate: render.NewPoint(-8, -7),
RelativeCoordinate: render.NewPoint(23, 40),
},
testCase{
WorldCoordinate: render.NewPoint(-3600, -4800),
ChunkCoordinate: render.NewPoint(-29, -38),
RelativeCoordinate: render.NewPoint(112, 64),
}, },
} }
for _, test := range tests { for _, test := range tests {
actual := c.ChunkCoordinate(test.In) // Test conversion from world to chunk coordinate.
if actual != test.Expect { actual := c.ChunkCoordinate(test.WorldCoordinate)
if actual != test.ChunkCoordinate {
t.Errorf( t.Errorf(
"Failed ChunkCoordinate conversion:\n"+ "Failed ChunkCoordinate conversion:\n"+
" Input: %s\n"+ " Input: %s\n"+
"Expected: %s\n"+ "Expected: %s\n"+
" Got: %s", " Got: %s",
test.In, test.WorldCoordinate,
test.Expect, test.ChunkCoordinate,
actual,
)
}
// Test the relative (inside-chunk) coordinate.
actual = level.RelativeCoordinate(test.WorldCoordinate, actual, c.Size)
if actual != test.RelativeCoordinate {
t.Errorf(
"Failed RelativeCoordinate conversion:\n"+
" Input: %s\n"+
"Expected: %s\n"+
" Got: %s",
test.WorldCoordinate,
test.RelativeCoordinate,
actual, actual,
) )
} }

View File

@ -325,7 +325,7 @@ func (c *Chunker) GetChunk(p render.Point) (*Chunk, bool) {
// Hit the zipfile for it. // Hit the zipfile for it.
if c.Zipfile != nil { if c.Zipfile != nil {
if chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, p); err == nil { if chunk, err := c.ChunkFromZipfile(p); err == nil {
// log.Debug("GetChunk(%s) cache miss, read from zip", p) // log.Debug("GetChunk(%s) cache miss, read from zip", p)
c.SetChunk(p, chunk) // cache it c.SetChunk(p, chunk) // cache it
c.logChunkAccess(p, chunk) // for the LRU cache c.logChunkAccess(p, chunk) // for the LRU cache
@ -605,6 +605,51 @@ func (c *Chunker) ChunkCoordinate(abs render.Point) render.Point {
) )
} }
// RelativeCoordinate will translate from an absolute world coordinate, into one that
// is relative to fit inside of the chunk with the given chunk coordinate and size.
//
// Example:
//
//   - With 128x128 chunks and a world coordinate of (280,-600)
//   - The ChunkCoordinate would be (2,-5) which encompasses (256,-640) to (383,-513)
//   - And relative inside that chunk, the pixel is at (24,40)
func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uint8) render.Point {
	// Subtract the chunk's world-space origin (chunk coordinate scaled by
	// the chunk size) from the absolute point.
	size := int(chunkSize)
	return render.Point{
		X: abs.X - chunkCoord.X*size,
		Y: abs.Y - chunkCoord.Y*size,
	}
}
// FromRelativeCoordinate is the inverse of RelativeCoordinate.
//
// With a chunk size of 128 and a relative coordinate like (8, 12),
// this function will return the absolute world coordinates based
// on your chunk.Point's placement in the level.
func FromRelativeCoordinate(rel render.Point, chunkCoord render.Point, chunkSize uint8) render.Point {
	// Add the chunk's world-space origin back onto the relative point.
	size := int(chunkSize)
	return render.Point{
		X: rel.X + chunkCoord.X*size,
		Y: rel.Y + chunkCoord.Y*size,
	}
}
// ChunkMap maps a chunk coordinate to its chunk data. // ChunkMap maps a chunk coordinate to its chunk data.
type ChunkMap map[render.Point]*Chunk type ChunkMap map[render.Point]*Chunk

View File

@ -0,0 +1,67 @@
package level
import (
"runtime"
"sync"
"git.kirsle.net/SketchyMaze/doodle/pkg/balance"
"git.kirsle.net/SketchyMaze/doodle/pkg/log"
)
/* Functions to migrate Chunkers between different implementations. */
// OptimizeChunkerAccessors will evaluate all of the chunks of your drawing
// and possibly migrate them to a different Accessor implementation when
// saving on disk.
//
// Currently this upgrades legacy MapAccessor chunks into RLE-compressed
// accessors (gated on the RLEBinaryChunkerEnabled feature flag), fanning
// the work out over one worker goroutine per CPU.
func (c *Chunker) OptimizeChunkerAccessors() {
	// Parallelize this with goroutines.
	var (
		chunks = make(chan *Chunk, len(c.Chunks))
		wg     sync.WaitGroup
	)

	// Spawn one worker per CPU. NOTE: `for range <int>` requires Go 1.22+.
	for range runtime.NumCPU() {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for chunk := range chunks {
				var point = chunk.Point

				// Upgrade all MapTypes into RLE compressed MapTypes?
				if balance.RLEBinaryChunkerEnabled {
					if chunk.Type == MapType {
						log.Info("Optimizing chunk %s accessor from Map to RLE", point)
						// NOTE(review): the ok result is discarded; if Type is
						// MapType but Accessor is not a *MapAccessor, ma is nil
						// here — confirm that mismatch cannot happen.
						ma, _ := chunk.Accessor.(*MapAccessor)
						rle := NewRLEAccessor(chunk).FromMapAccessor(ma)

						// Lock the chunker for updating.
						c.chunkMu.Lock()
						c.Chunks[point].Type = RLEType
						c.Chunks[point].Accessor = rle
						c.chunkMu.Unlock()
					}
				}
			}
		}()
	}

	// Feed it the chunks. The workers drain concurrently, so feeding more
	// chunks than the channel buffer holds does not deadlock.
	for point := range c.IterChunks() {
		chunk, ok := c.GetChunk(point)
		if !ok {
			continue
		}
		chunks <- chunk
	}
	close(chunks)
	wg.Wait()
}
// FromMapAccessor migrates from a MapAccessor to RLE.
//
// The returned accessor keeps the receiver's parent chunk but adopts the
// given MapAccessor as its in-memory pixel store.
func (a *RLEAccessor) FromMapAccessor(ma *MapAccessor) *RLEAccessor {
	migrated := RLEAccessor{
		chunk: a.chunk,
		acc:   ma,
	}
	return &migrated
}

View File

@ -228,3 +228,98 @@ func TestViewportChunks(t *testing.T) {
} }
} }
} }
// TestRelativeCoordinates exercises the world->relative coordinate translation
// and verifies the FromRelativeCoordinate round trip restores the original.
func TestRelativeCoordinates(t *testing.T) {
	var (
		chunker = level.NewChunker(128)
	)

	type TestCase struct {
		WorldCoord     render.Point
		ChunkCoord     render.Point // zero value = compute from WorldCoord
		ExpectRelative render.Point
	}
	var tests = []TestCase{
		{
			WorldCoord:     render.NewPoint(4, 8),
			ExpectRelative: render.NewPoint(4, 8),
		},
		{
			WorldCoord:     render.NewPoint(128, 128),
			ExpectRelative: render.NewPoint(0, 0),
		},
		{
			WorldCoord:     render.NewPoint(143, 144),
			ExpectRelative: render.NewPoint(15, 16),
		},
		{
			WorldCoord:     render.NewPoint(-105, -86),
			ExpectRelative: render.NewPoint(23, 42),
		},
		{
			WorldCoord:     render.NewPoint(-252, 264),
			ExpectRelative: render.NewPoint(4, 8),
		},

		// These were seen breaking actual levels, at the corners of the chunk
		{
			WorldCoord:     render.NewPoint(511, 256),
			ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game
		},
		{
			WorldCoord:     render.NewPoint(511, 512),
			ChunkCoord:     render.NewPoint(4, 4),
			ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game
		},
		{
			WorldCoord:     render.NewPoint(127, 384),
			ChunkCoord:     render.NewPoint(1, 3),
			ExpectRelative: render.NewPoint(-1, 0),
		},
	}

	for i, test := range tests {
		// Compute the expected chunk coord automatically when not given.
		//
		// BUG FIX: this must happen BEFORE calling RelativeCoordinate; the
		// original resolved render.Origin only after the conversion, so
		// cases without an explicit ChunkCoord were evaluated against
		// chunk (0,0) instead of their actual chunk.
		chunkCoord := test.ChunkCoord
		if chunkCoord == render.Origin {
			chunkCoord = chunker.ChunkCoordinate(test.WorldCoord)
		}

		var (
			actualRelative = level.RelativeCoordinate(
				test.WorldCoord,
				chunkCoord,
				chunker.Size,
			)
			roundTrip = level.FromRelativeCoordinate(
				actualRelative,
				chunkCoord,
				chunker.Size,
			)
		)

		// Test conversion from world to relative (inside-chunk) coordinate.
		if actualRelative != test.ExpectRelative {
			t.Errorf("Test %d: world coord %s in chunk %s\n"+
				"Expected RelativeCoordinate() to be: %s\n"+
				"But it was: %s",
				i,
				test.WorldCoord,
				chunkCoord,
				test.ExpectRelative,
				actualRelative,
			)
		}

		// The inverse conversion must restore the original world coordinate.
		if roundTrip != test.WorldCoord {
			t.Errorf("Test %d: world coord %s in chunk %s\n"+
				"Did not survive round trip! Expected: %s\n"+
				"But it was: %s",
				i,
				test.WorldCoord,
				chunkCoord,
				test.WorldCoord,
				roundTrip,
			)
		}
	}
}

View File

@ -2,8 +2,9 @@ package level
import ( import (
"archive/zip" "archive/zip"
"errors"
"fmt" "fmt"
"io/ioutil" "io"
"regexp" "regexp"
"strconv" "strconv"
@ -92,7 +93,7 @@ func (c *Chunker) MigrateZipfile(zf *zip.Writer) error {
} }
// Verify that this chunk file in the old ZIP was not empty. // Verify that this chunk file in the old ZIP was not empty.
chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, point) chunk, err := c.ChunkFromZipfile(point)
if err == nil && chunk.Len() == 0 { if err == nil && chunk.Len() == 0 {
log.Debug("Skip chunk %s (old zipfile chunk was empty)", coord) log.Debug("Skip chunk %s (old zipfile chunk was empty)", coord)
continue continue
@ -190,11 +191,7 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error {
data = bytes data = bytes
} }
} else { } else {
if json, err := c.MarshalJSON(); err != nil { return errors.New("Chunk.ToZipfile: JSON chunk format no longer supported for writing")
return err
} else {
data = json
}
} }
// Write the file contents to zip whether binary or json. // Write the file contents to zip whether binary or json.
@ -208,44 +205,64 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error {
} }
// ChunkFromZipfile loads a chunk from a zipfile. // ChunkFromZipfile loads a chunk from a zipfile.
func ChunkFromZipfile(zf *zip.Reader, layer int, coord render.Point) (*Chunk, error) { func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) {
// File names? // Grab the chunk (bin or json) from the Zipfile.
var ( ext, bin, err := c.RawChunkFromZipfile(coord)
binfile = fmt.Sprintf("chunks/%d/%s.bin", layer, coord)
jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord)
chunk = NewChunk()
)
// Read from the new binary format.
if file, err := zf.Open(binfile); err == nil {
// log.Debug("Reading binary compressed chunk from %s", binfile)
bin, err := ioutil.ReadAll(file)
if err != nil { if err != nil {
return nil, err return nil, err
} }
var chunk = NewChunk()
chunk.Point = coord
chunk.Size = c.Size
switch ext {
case ".bin":
// New style .bin compressed format:
// Either a MapAccessor compressed bin, or RLE compressed.
err = chunk.UnmarshalBinary(bin) err = chunk.UnmarshalBinary(bin)
if err != nil { if err != nil {
log.Error("ChunkFromZipfile(%s): %s", coord, err)
return nil, err return nil, err
} }
} else if file, err := zf.Open(jsonfile); err == nil { case ".json":
// log.Debug("Reading JSON encoded chunk from %s", jsonfile) // Legacy style plain .json file (MapAccessor only).
bin, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
err = chunk.UnmarshalJSON(bin) err = chunk.UnmarshalJSON(bin)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} else { default:
return nil, err return nil, fmt.Errorf("unexpected filetype found for this chunk: %s", ext)
} }
return chunk, nil return chunk, nil
} }
// RawChunkFromZipfile loads a chunk from a zipfile and returns its raw binary content.
//
// Returns the file extension (".bin" or ".json"), raw bytes, and an error.
// The binary format is preferred; the legacy JSON file is the fallback.
func (c *Chunker) RawChunkFromZipfile(coord render.Point) (string, []byte, error) {
	// File names?
	var (
		zf       = c.Zipfile
		layer    = c.Layer
		binfile  = fmt.Sprintf("chunks/%d/%s.bin", layer, coord)
		jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord)
	)

	// Read from the new binary format.
	if file, err := zf.Open(binfile); err == nil {
		// BUG FIX: close the opened zip entry; the original leaked the
		// file handle on both branches.
		defer file.Close()
		data, err := io.ReadAll(file)
		return ".bin", data, err
	} else if file, err := zf.Open(jsonfile); err == nil {
		defer file.Close()
		data, err := io.ReadAll(file)
		return ".json", data, err
	}

	return "", nil, errors.New("not found in zipfile")
}
// ChunksInZipfile returns the list of chunk coordinates in a zipfile. // ChunksInZipfile returns the list of chunk coordinates in a zipfile.
func ChunksInZipfile(zf *zip.Reader, layer int) []render.Point { func ChunksInZipfile(zf *zip.Reader, layer int) []render.Point {
var ( var (

View File

@ -4,6 +4,21 @@ import "git.kirsle.net/SketchyMaze/doodle/pkg/log"
// Maintenance functions for the file format on disk. // Maintenance functions for the file format on disk.
// Vacuum runs any maintenance or migration tasks for the level at time of save.
//
// It will prune broken links between actors, and migrate internal data
// structures to optimize storage on disk of its binary data.
func (m *Level) Vacuum() error {
	if links := m.PruneLinks(); links > 0 {
		// BUG FIX: the format string has a %d verb but the original call
		// omitted the `links` argument.
		log.Debug("Vacuum: removed %d broken links between actors in this level.", links)
	}

	// Let the Chunker optimize accessor types.
	m.Chunker.OptimizeChunkerAccessors()

	return nil
}
// PruneLinks cleans up any Actor Links that can not be resolved in the // PruneLinks cleans up any Actor Links that can not be resolved in the
// level data. For example, if actors were linked in Edit Mode and one // level data. For example, if actors were linked in Edit Mode and one
// actor is deleted leaving a broken link. // actor is deleted leaving a broken link.

View File

@ -3,6 +3,7 @@ package level
import ( import (
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os"
"runtime" "runtime"
"strings" "strings"
@ -96,7 +97,9 @@ func (m *Level) WriteFile(filename string) error {
m.GameVersion = branding.Version m.GameVersion = branding.Version
// Maintenance functions, clean up cruft before save. // Maintenance functions, clean up cruft before save.
m.PruneLinks() if err := m.Vacuum(); err != nil {
log.Error("Vacuum level %s: %s", filename, err)
}
bin, err := m.ToJSON() bin, err := m.ToJSON()
if err != nil { if err != nil {
@ -115,7 +118,7 @@ func (m *Level) WriteFile(filename string) error {
} }
// Desktop: write to disk. // Desktop: write to disk.
err = ioutil.WriteFile(filename, bin, 0644) err = os.WriteFile(filename, bin, 0644)
if err != nil { if err != nil {
return fmt.Errorf("level.WriteFile: %s", err) return fmt.Errorf("level.WriteFile: %s", err)
} }

201
pkg/level/rle/rle.go Normal file
View File

@ -0,0 +1,201 @@
// Package rle contains support for Run-Length Encoding of level chunks.
package rle
import (
"bytes"
"encoding/binary"
"errors"
"strings"
"git.kirsle.net/go/render"
)
// NullColor is the sentinel palette index meaning "no pixel here" (void).
const NullColor = 0xFFFF

// Grid is a 2D array of nullable integers to store a flat bitmap of a chunk.
type Grid [][]*uint64

// NewGrid will return an initialized 2D grid of equal dimensions of the given size.
//
// The grid is indexed in [Y][X] notation, or: by row first and then column.
func NewGrid(size int) (Grid, error) {
	// Guard against zero AND negative sizes: make() with a negative length
	// would panic, so reject both up front.
	if size <= 0 {
		return nil, errors.New("no size given for RLE Grid: the chunker was probably not initialized")
	}

	grid := make(Grid, size)
	for i := range grid {
		grid[i] = make([]*uint64, size)
	}
	return grid, nil
}

// MustGrid returns a new Grid of the given size, panicking if the size
// was invalid. Intended for use in tests and initialization code.
func MustGrid(size int) Grid {
	grid, err := NewGrid(size)
	if err != nil {
		panic(err)
	}
	return grid
}
// Pixel pairs a grid coordinate with its palette index.
//
// NOTE(review): not referenced elsewhere in this file's visible code;
// presumably used by callers iterating flattened chunk pixels — confirm.
type Pixel struct {
	Point   render.Point
	Palette int
}
// Size of the (square) grid, as its number of rows.
//
// Measures len(g) rather than len(g[0]) so that a zero-value or empty
// Grid reports 0 instead of panicking; for grids built by NewGrid the
// two are identical.
func (g Grid) Size() int {
	return len(g)
}
// Compress the grid into a byte stream of RLE compressed data.
//
// The compressed format is a stream of:
//
//   - A Uvarint for the palette index or 0xffff (65535) for null.
//   - A Uvarint for how many pixels to repeat that color.
func (g Grid) Compress() ([]byte, error) {
	// Run-length encode the grid.
	var (
		compressed []byte // final result
		lastColor  uint64 // last color seen (current streak)
		runLength  uint64 // current streak for the last color
		seen       bool   // at least one pixel has been processed

		// Flush the current (color, run length) pair to the output.
		flush = func() {
			compressed = binary.AppendUvarint(compressed, lastColor)
			compressed = binary.AppendUvarint(compressed, runLength)
		}
	)

	for _, row := range g {
		for _, nullableIndex := range row {
			// Map nil cells onto the null sentinel color.
			var index uint64
			if nullableIndex == nil {
				index = NullColor
			} else {
				index = *nullableIndex
			}

			// First pixel of the grid begins the first streak.
			if !seen {
				lastColor = index
				runLength = 1
				seen = true
				continue
			}

			// On a change of color: flush the completed streak and begin
			// a new one for the current pixel.
			if index != lastColor {
				flush()
				lastColor = index
				runLength = 1
				continue
			}

			runLength++
		}
	}

	// Flush the final streak.
	//
	// BUG FIX: the original flushed only when the final pixel extended a
	// streak ("buffering"), so any grid whose last pixel began a fresh
	// streak lost its final run (and a 1x1 grid produced empty output).
	if seen {
		flush()
	}

	return compressed, nil
}
// Decompress the RLE byte stream back into a populated 2D grid.
//
// Returns an error if the stream encodes more pixels than the grid can
// hold. A truncated final record is tolerated: decoding simply stops at
// the end of the available data.
func (g Grid) Decompress(compressed []byte) error {
	var (
		size   = g.Size()
		x, y   int // current write position in the grid
		reader = bytes.NewBuffer(compressed)
	)

	for {
		// Each RLE record is a (palette index, repeat count) pair.
		paletteIndexRaw, err1 := binary.ReadUvarint(reader)
		repeatCount, err2 := binary.ReadUvarint(reader)
		if err1 != nil || err2 != nil {
			// End of stream (or truncated final record).
			break
		}

		// Translate the sentinel value back into a nil (void) pixel.
		// Note: all pixels of this run share one pointer, as before.
		var paletteIndex *uint64
		if paletteIndexRaw != NullColor {
			paletteIndex = &paletteIndexRaw
		}

		for i := uint64(0); i < repeatCount; i++ {
			// BUG FIX: guard against malformed streams that encode more
			// than size*size pixels; the original indexed out of range
			// and panicked.
			if y >= size {
				return errors.New("rle: decompressed data overflows the grid")
			}

			g[y][x] = paletteIndex

			x++
			if x >= size {
				x = 0
				y++
			}
		}
	}

	return nil
}
// Visualize the state of the 2D grid.
//
// Each row renders as one line of single-character palette symbols wrapped
// in square brackets, suitable for eyeballing in debug logs.
func (g Grid) Visualize() string {
	var sb strings.Builder
	for i, row := range g {
		if i > 0 {
			sb.WriteString("\n")
		}
		sb.WriteString("[")
		for _, col := range row {
			// Alphabetize already renders a nil (null color) as a space.
			sb.WriteString(Alphabetize(col))
		}
		sb.WriteString("]")
	}
	return sb.String()
}
// alphabet is the pool of printable symbols used by Visualize.
const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"

// Alphabetize converts a palette index value into a single character for
// Visualize to display.
//
// A nil value (the null color) renders as a space. Indexes beyond the 36
// available symbols wrap around and begin reusing them.
func Alphabetize(value *uint64) string {
	if value == nil {
		return " "
	}
	offset := *value % uint64(len(alphabet))
	return string(alphabet[offset])
}

43
pkg/level/rle/rle_test.go Normal file
View File

@ -0,0 +1,43 @@
package rle_test
import (
"testing"
"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
)
// TestRLE round-trips a completely filled grid through Compress and
// Decompress and verifies every pixel of the decompressed copy.
func TestRLE(t *testing.T) {
	// Test a completely filled grid.
	var (
		grid  = rle.MustGrid(128)
		color = uint64(5)
	)
	for y := range grid {
		// NOTE: ranging over grid[y] (the row), not over the int y --
		// the latter would only fill a triangle of the grid.
		for x := range grid[y] {
			grid[y][x] = &color
		}
	}

	// Compress and decompress it into a second grid.
	compressed, err := grid.Compress()
	if err != nil {
		t.Fatalf("Compress: %s", err)
	}
	grid2 := rle.MustGrid(128)
	if err := grid2.Decompress(compressed); err != nil {
		t.Fatalf("Decompress: %s", err)
	}

	// Ensure our color is set everywhere in the DECOMPRESSED grid,
	// comparing values rather than pointer identity (Decompress
	// allocates its own pointers).
	for y := range grid2 {
		for x := range grid2[y] {
			actual := grid2[y][x]
			if actual == nil || *actual != color {
				t.Errorf("RLE compression didn't survive the round trip: %d,%d didn't save\n"+
					" Expected: %d\n"+
					" Actually: %v",
					x, y,
					color,
					actual,
				)
			}
		}
	}
}

View File

@ -420,7 +420,7 @@ func (w *Canvas) loopEditable(ev *event.State) error {
baseColor, err := chunker.Get(cursor) baseColor, err := chunker.Get(cursor)
if err != nil { if err != nil {
limit = balance.FloodToolVoidLimit limit = balance.FloodToolVoidLimit
log.Warn("FloodTool: couldn't get base color at %s: %s (got %s)", cursor, err, baseColor.Color) log.Warn("FloodTool: couldn't get base color at %s: %s (got %+v)", cursor, err, baseColor)
} }
// If no change, do nothing. // If no change, do nothing.