RLE Compression for File Formats #95

Merged
kirsle merged 6 commits from rle-compression into master 2024-05-24 23:48:00 +00:00
7 changed files with 167 additions and 85 deletions
Showing only changes of commit 4851730ccf

View File

@@ -21,27 +21,10 @@ func init() {
Usage: "load and re-save a level or doodad file to migrate to newer file format versions",
ArgsUsage: "<.level or .doodad>",
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "actors",
Usage: "print verbose actor data in Level files",
},
&cli.BoolFlag{
Name: "chunks",
Usage: "print verbose data about all the pixel chunks in a file",
},
&cli.BoolFlag{
Name: "script",
Usage: "print the script from a doodad file and exit",
},
&cli.StringFlag{
Name: "attachment",
Aliases: []string{"a"},
Usage: "print the contents of the attached filename to terminal",
},
&cli.BoolFlag{
Name: "verbose",
Aliases: []string{"v"},
Usage: "print verbose output (all verbose flags enabled)",
Name: "output",
Aliases: []string{"o"},
Usage: "write to a different file than the input",
},
},
Action: func(c *cli.Context) error {
@@ -84,6 +67,18 @@ func resaveLevel(c *cli.Context, filename string) error {
log.Info("Loaded level from file: %s", filename)
log.Info("Last saved game version: %s", lvl.GameVersion)
// Different output filename?
if output := c.String("output"); output != "" {
log.Info("Output will be saved to: %s", output)
filename = output
}
if err := lvl.Vacuum(); err != nil {
log.Error("Vacuum error: %s", err)
} else {
log.Info("Run vacuum on level file.")
}
log.Info("Saving back to disk")
if err := lvl.WriteJSON(filename); err != nil {
return fmt.Errorf("couldn't write %s: %s", filename, err)
@@ -100,6 +95,12 @@ func resaveDoodad(c *cli.Context, filename string) error {
log.Info("Loaded doodad from file: %s", filename)
log.Info("Last saved game version: %s", dd.GameVersion)
// Different output filename?
if output := c.String("output"); output != "" {
log.Info("Output will be saved to: %s", output)
filename = output
}
log.Info("Saving back to disk")
if err := dd.WriteJSON(filename); err != nil {
return fmt.Errorf("couldn't write %s: %s", filename, err)

View File

@@ -1,6 +1,8 @@
package commands
import (
"bytes"
"encoding/binary"
"fmt"
"path/filepath"
"sort"
@@ -9,6 +11,7 @@ import (
"git.kirsle.net/SketchyMaze/doodle/pkg/doodads"
"git.kirsle.net/SketchyMaze/doodle/pkg/enum"
"git.kirsle.net/SketchyMaze/doodle/pkg/level"
"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
"git.kirsle.net/SketchyMaze/doodle/pkg/log"
"github.com/urfave/cli/v2"
)
@@ -44,6 +47,14 @@ func init() {
Aliases: []string{"v"},
Usage: "print verbose output (all verbose flags enabled)",
},
&cli.BoolFlag{
Name: "visualize-rle",
Usage: "visually dump RLE encoded chunks to the terminal (VERY noisy for large drawings!)",
},
&cli.StringFlag{
Name: "chunk",
Usage: "specific chunk coordinate; when debugging chunks, only show this chunk (example: 2,-1)",
},
},
Action: func(c *cli.Context) error {
if c.NArg() < 1 {
@@ -263,6 +274,10 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
chunkSize = int(ch.Size)
width = worldSize.W - worldSize.X
height = worldSize.H - worldSize.Y
// Chunk debugging CLI options.
visualize = c.Bool("visualize-rle")
specificChunk = c.String("chunk")
)
fmt.Println("Chunks:")
fmt.Printf(" Pixels Per Chunk: %d^2\n", ch.Size)
@@ -278,7 +293,18 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
// Verbose chunk information.
if c.Bool("chunks") || c.Bool("verbose") {
fmt.Println(" Chunk Details:")
for point, chunk := range ch.Chunks {
for point := range ch.IterChunks() {
// Debugging specific chunk coordinate?
if specificChunk != "" && point.String() != specificChunk {
log.Warn("Skip chunk %s: not the specific chunk you're looking for", point)
continue
}
chunk, ok := ch.GetChunk(point)
if !ok {
continue
}
fmt.Printf(" - Coord: %s\n", point)
fmt.Printf(" Type: %s\n", chunkTypeToName(chunk.Type))
fmt.Printf(" Range: (%d,%d) ... (%d,%d)\n",
@@ -287,6 +313,33 @@ func showChunker(c *cli.Context, ch *level.Chunker) {
(int(point.X)*chunkSize)+chunkSize,
(int(point.Y)*chunkSize)+chunkSize,
)
fmt.Printf(" Usage: %f (%d len of %d)\n", chunk.Usage(), chunk.Len(), chunkSize*chunkSize)
// Visualize the RLE encoded chunks?
if visualize && chunk.Type == level.RLEType {
ext, bin, err := ch.RawChunkFromZipfile(point)
if err != nil {
log.Error(err.Error())
continue
} else if ext != ".bin" {
log.Error("Unexpected filetype for RLE compressed chunk (expected .bin, got %s)", ext)
continue
}
// Read off the first byte (chunk type)
var reader = bytes.NewBuffer(bin)
binary.ReadUvarint(reader)
bin = reader.Bytes()
grid, err := rle.NewGrid(chunkSize)
if err != nil {
log.Error(err.Error())
continue
}
grid.Decompress(bin)
fmt.Println(grid.Visualize())
}
}
} else {
fmt.Println(" Use -chunks or -verbose to serialize Chunks")
@@ -298,6 +351,8 @@ func chunkTypeToName(v uint64) string {
switch v {
case level.MapType:
return "map"
case level.RLEType:
return "rle map"
case level.GridType:
return "grid"
default:

View File

@@ -330,8 +330,9 @@ func (c *Chunk) SizePositive() render.Rect {
}
// Usage returns the percent of free space vs. allocated pixels in the chunk.
func (c *Chunk) Usage(size int) float64 {
return float64(c.Len()) / float64(size)
func (c *Chunk) Usage() float64 {
size := float64(c.Size)
return float64(c.Len()) / (size * size)
}
// UnmarshalJSON loads the chunk from JSON and uses the correct accessor to

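As a quick worked example of the new Usage() arithmetic (illustrative only, not part of the diff): a chunk whose Size is 128 and which has 1,024 pixels allocated reports 1024 / (128 × 128) = 0.0625, i.e. roughly 6% of the chunk is in use. The 128 is just an example size, not a claim about the project's default chunk size.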
View File

@@ -77,18 +77,21 @@ func (a *RLEAccessor) MarshalBinary() ([]byte, error) {
}
// Populate the dense 2D array of its pixels.
for px := range a.Iter() {
var (
point = render.NewPoint(px.X, px.Y)
relative = RelativeCoordinate(point, a.chunk.Point, a.chunk.Size)
ptr = uint64(px.Swatch.Index())
)
for y, row := range grid {
for x := range row {
var (
relative = render.NewPoint(x, y)
absolute = FromRelativeCoordinate(relative, a.chunk.Point, a.chunk.Size)
swatch, err = a.Get(absolute)
)
// TODO: sometimes we get a -1 value in X or Y, not sure why.
if relative.X < 0 || relative.Y < 0 {
continue
if err != nil {
continue
}
var ptr = uint64(swatch.Index())
grid[relative.Y][relative.X] = &ptr
}
grid[relative.Y][relative.X] = &ptr
}
return grid.Compress()
@@ -119,10 +122,7 @@ func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error {
continue
}
// TODO: x-1 to avoid the level creeping to the right every save,
// not sure on the root cause! RLEAccessor Decompress?
abs := FromRelativeCoordinate(render.NewPoint(x, y), a.chunk.Point, a.chunk.Size)
abs.X -= 1
a.acc.grid[abs] = NewSparseSwatch(int(*col))
}
}

View File

@@ -14,9 +14,6 @@ import (
// and possibly migrate them to a different Accessor implementation when
// saving on disk.
func (c *Chunker) OptimizeChunkerAccessors() {
c.chunkMu.Lock()
defer c.chunkMu.Unlock()
log.Info("Optimizing Chunker Accessors")
// TODO: parallelize this with goroutines
@@ -31,7 +28,6 @@ func (c *Chunker) OptimizeChunkerAccessors() {
defer wg.Done()
for chunk := range chunks {
var point = chunk.Point
log.Warn("Chunk %s is a: %d", point, chunk.Type)
// Upgrade all MapTypes into RLE compressed MapTypes?
if balance.RLEBinaryChunkerEnabled {
@@ -49,7 +45,11 @@ func (c *Chunker) OptimizeChunkerAccessors() {
}
// Feed it the chunks.
for _, chunk := range c.Chunks {
for point := range c.IterChunks() {
chunk, ok := c.GetChunk(point)
if !ok {
continue
}
chunks <- chunk
}

View File

@@ -4,7 +4,7 @@ import (
"archive/zip"
"errors"
"fmt"
"io/ioutil"
"io"
"regexp"
"strconv"
@@ -206,6 +206,42 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error {
// ChunkFromZipfile loads a chunk from a zipfile.
func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) {
// Grab the chunk (bin or json) from the Zipfile.
ext, bin, err := c.RawChunkFromZipfile(coord)
if err != nil {
return nil, err
}
var chunk = NewChunk()
chunk.Point = coord
chunk.Size = c.Size
switch ext {
case ".bin":
// New style .bin compressed format:
// Either a MapAccessor compressed bin, or RLE compressed.
err = chunk.UnmarshalBinary(bin)
if err != nil {
log.Error("ChunkFromZipfile(%s): %s", coord, err)
return nil, err
}
case ".json":
// Legacy style plain .json file (MapAccessor only).
err = chunk.UnmarshalJSON(bin)
if err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("unexpected filetype found for this chunk: %s", ext)
}
return chunk, nil
}
// RawChunkFromZipfile loads a chunk from a zipfile and returns its raw binary content.
//
// Returns the file extension (".bin" or ".json"), raw bytes, and an error.
func (c *Chunker) RawChunkFromZipfile(coord render.Point) (string, []byte, error) {
// File names?
var (
zf = c.Zipfile
@@ -213,41 +249,18 @@ func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) {
binfile = fmt.Sprintf("chunks/%d/%s.bin", layer, coord)
jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord)
chunk = NewChunk()
)
chunk.Point = coord
chunk.Size = c.Size
// Read from the new binary format.
if file, err := zf.Open(binfile); err == nil {
// log.Debug("Reading binary compressed chunk from %s", binfile)
bin, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
err = chunk.UnmarshalBinary(bin)
if err != nil {
log.Error("ChunkFromZipfile(%s): %s", coord, err)
return nil, err
}
data, err := io.ReadAll(file)
return ".bin", data, err
} else if file, err := zf.Open(jsonfile); err == nil {
// log.Debug("Reading JSON encoded chunk from %s", jsonfile)
bin, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
err = chunk.UnmarshalJSON(bin)
if err != nil {
return nil, err
}
} else {
return nil, err
data, err := io.ReadAll(file)
return ".json", data, err
}
return chunk, nil
return "", nil, errors.New("not found in zipfile")
}
// ChunksInZipfile returns the list of chunk coordinates in a zipfile.

View File

@@ -5,10 +5,8 @@ import (
"bytes"
"encoding/binary"
"errors"
"fmt"
"strings"
"git.kirsle.net/SketchyMaze/doodle/pkg/log"
"git.kirsle.net/go/render"
)
@@ -25,9 +23,9 @@ func NewGrid(size int) (Grid, error) {
return nil, errors.New("no size given for RLE Grid: the chunker was probably not initialized")
}
var grid = make([][]*uint64, size+1)
for i := 0; i < size+1; i++ {
grid[i] = make([]*uint64, size+1)
var grid = make([][]*uint64, size)
for i := 0; i < size; i++ {
grid[i] = make([]*uint64, size)
}
return grid, nil
@@ -58,7 +56,7 @@ func (g Grid) Size() int {
// - A Uvarint for the palette index (0-255) or 0xffff (65535) for null.
// - A Uvarint for how many pixels to repeat that color.
func (g Grid) Compress() ([]byte, error) {
log.Error("BEGIN Compress()")
// log.Error("BEGIN Compress()")
// log.Warn("Visualized:\n%s", g.Visualize())
// Run-length encode the grid.
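For readers unfamiliar with the run format documented above (a Uvarint palette index, with 0xFFFF standing in for a null pixel, followed by a Uvarint repeat count), here is a standalone sketch that round-trips two runs with encoding/binary. It is not part of the diff; every name in it is illustrative only.

    // Illustrative sketch only: encode and then decode two runs in the
    // (palette index, repeat count) Uvarint format described above.
    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
    )

    // 0xFFFF is the sentinel the format uses for "no pixel here".
    const nullIndex = 0xFFFF

    func main() {
        // Encode two runs: 100 blank pixels, then 28 pixels of palette index 3.
        var stream []byte
        stream = binary.AppendUvarint(stream, nullIndex)
        stream = binary.AppendUvarint(stream, 100)
        stream = binary.AppendUvarint(stream, 3)
        stream = binary.AppendUvarint(stream, 28)

        // Decode the stream back into (index, count) pairs.
        reader := bytes.NewBuffer(stream)
        for {
            index, err := binary.ReadUvarint(reader)
            if err != nil {
                break // end of stream
            }
            count, _ := binary.ReadUvarint(reader)
            if index == nullIndex {
                fmt.Printf("%d blank pixels\n", count)
            } else {
                fmt.Printf("%d pixels of palette index %d\n", count, index)
            }
        }
    }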
@@ -120,13 +118,14 @@ func (g Grid) Compress() ([]byte, error) {
// Decompress the RLE byte stream back into a populated 2D grid.
func (g Grid) Decompress(compressed []byte) error {
log.Error("BEGIN Decompress()")
// log.Error("BEGIN Decompress() Length of stream: %d", len(compressed))
// log.Warn("Visualized:\n%s", g.Visualize())
// Prepare the 2D grid to decompress the RLE stream into.
var (
size = g.Size()
x, y, cursor int
size = g.Size()
x, y = -1, -1
cursor int
)
var reader = bytes.NewBuffer(compressed)
@@ -147,22 +146,19 @@ func (g Grid) Decompress(compressed []byte) error {
paletteIndex = &paletteIndexRaw
}
// log.Warn("RLE index %v for %dpx", paletteIndexRaw, repeatCount)
// log.Warn("RLE index %v for %dpx - coord=%d,%d", paletteIndexRaw, repeatCount, x, y)
for i := uint64(0); i < repeatCount; i++ {
cursor++
if cursor%size == 0 {
y++
x = 0
}
point := render.NewPoint(int(x), int(y))
if point.Y >= size || point.X >= size {
continue
}
g[point.Y][point.X] = paletteIndex
x++
cursor++
}
}
@@ -180,10 +176,26 @@ func (g Grid) Visualize() string {
if col == nil {
line += " "
} else {
line += fmt.Sprintf("%x", *col)
line += Alphabetize(col)
}
}
lines = append(lines, line+"]")
}
return strings.Join(lines, "\n")
}
const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
// Alphabetize converts a palette index value into a single character for
// Visualize to display.
//
// It supports up to 36 palette indexes before it will wrap back around and
// begin reusing symbols.
func Alphabetize(value *uint64) string {
if value == nil {
return " "
}
var i = int(*value)
return string(alphabet[i%len(alphabet)])
}
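Finally, a small self-contained sketch of the wrap-around behaviour described in the Alphabetize comment, using the rle import path shown earlier in this diff; again illustrative only, not part of the change.

    package main

    import (
        "fmt"

        "git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
    )

    func main() {
        // Indexes past 35 wrap back around the 36-character alphabet.
        v := uint64(37)
        fmt.Println(rle.Alphabetize(&v))  // 37 % 36 == 1, so this prints "1"
        fmt.Println(rle.Alphabetize(nil)) // a nil (empty) pixel prints as a single space
    }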