Compare commits: 57c76de679 ... 5654145fd8

2 Commits:
5654145fd8
b1d7c7a384

@@ -101,6 +101,11 @@ func main() {
			Name:  "chdir",
			Usage: "working directory for the game's runtime package",
		},
		&cli.BoolFlag{
			Name:    "new",
			Aliases: []string{"n"},
			Usage:   "open immediately to the level editor",
		},
		&cli.BoolFlag{
			Name:    "edit",
			Aliases: []string{"e"},

@@ -248,6 +253,8 @@ func main() {
		if c.Bool("guitest") {
			game.Goto(&doodle.GUITestScene{})
		} else if c.Bool("new") {
			game.NewMap()
		} else if filename != "" {
			if c.Bool("edit") {
				game.EditFile(filename)

@@ -28,6 +28,12 @@ const (
	// If you set both flags to false, level zipfiles will use the classic
	// json chunk format as before on save.
	BinaryChunkerEnabled = true

	// Enable "v3" Run-Length Encoding for level chunker.
	//
	// This only supports Zipfile levels and will use the ".bin" format
	// enabled by the previous setting.
	RLEBinaryChunkerEnabled = true
)

// Feature Flags to turn on/off experimental content.

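These two balance flags select the on-disk chunk format at save time. As a rough illustration of how they interact (a minimal sketch: only the flag names and the ".bin"/JSON behavior come from this diff; the helper itself is hypothetical):

```go
package example

import "git.kirsle.net/SketchyMaze/doodle/pkg/balance"

// chooseChunkEncoding is a hypothetical helper illustrating how the two
// feature flags above could gate the chunk format written to a zipfile.
func chooseChunkEncoding() string {
	if balance.RLEBinaryChunkerEnabled {
		return "rle .bin" // run-length encoded binary chunks
	}
	if balance.BinaryChunkerEnabled {
		return "map .bin" // binary chunks with the map accessor payload
	}
	return "json" // classic JSON chunk format
}
```
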
@@ -4,6 +4,7 @@ import (
	"bytes"
	"encoding/binary"
	"encoding/json"
	"errors"
	"fmt"
	"image"
	"math"

@@ -19,9 +20,13 @@ import (
// Types of chunks.
const (
	MapType uint64 = iota
	RLEType
	GridType
)

// Default chunk type for newly created chunks (was MapType).
const DefaultChunkType = RLEType

// Chunk holds a single portion of the pixel canvas.
type Chunk struct {
	Type uint64 // map vs. 2D array.

@@ -62,16 +67,15 @@ type Accessor interface {
	Len() int
	MarshalBinary() ([]byte, error)
	UnmarshalBinary([]byte) error
	MarshalJSON() ([]byte, error)
	UnmarshalJSON([]byte) error
}

// NewChunk creates a new chunk.
func NewChunk() *Chunk {
	return &Chunk{
		Type:     MapType,
		Accessor: NewMapAccessor(),
	var c = &Chunk{
		Type: RLEType,
	}
	c.Accessor = NewRLEAccessor(c)
	return c
}

// Texture will return a cached texture for the rendering engine for this

@@ -330,27 +334,13 @@ func (c *Chunk) Usage(size int) float64 {
	return float64(c.Len()) / float64(size)
}

// MarshalJSON writes the chunk to JSON.
//
// DEPRECATED: MarshalBinary will encode chunks to a tighter binary format.
func (c *Chunk) MarshalJSON() ([]byte, error) {
	data, err := c.Accessor.MarshalJSON()
	if err != nil {
		return []byte{}, err
	}

	generic := &JSONChunk{
		Type: c.Type,
		Data: data,
	}
	b, err := json.Marshal(generic)
	return b, err
}

// UnmarshalJSON loads the chunk from JSON and uses the correct accessor to
// parse the inner details.
//
// DEPRECATED in favor of binary marshalling.
//
// Only supports MapAccessor chunk types, which was the only one supported
// before this function was deprecated.
func (c *Chunk) UnmarshalJSON(b []byte) error {
	// Parse it generically so we can hand off the inner "data" object to the
	// right accessor for unmarshalling.

@@ -362,8 +352,11 @@ func (c *Chunk) UnmarshalJSON(b []byte) error {

	switch c.Type {
	case MapType:
		c.Accessor = NewMapAccessor()
		return c.Accessor.UnmarshalJSON(generic.Data)
		c.Accessor = NewMapAccessor(c)
		if unmarshaler, ok := c.Accessor.(json.Unmarshaler); ok {
			return unmarshaler.UnmarshalJSON(generic.Data)
		}
		return errors.New("Chunk.UnmarshalJSON: this chunk doesn't support JSON unmarshaling")
	default:
		return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type)
	}

@@ -406,7 +399,12 @@ func (c *Chunk) UnmarshalBinary(b []byte) error {
	// Decode the rest of the byte stream.
	switch chunkType {
	case MapType:
		c.Accessor = NewMapAccessor()
		c.Type = MapType
		c.Accessor = NewMapAccessor(c)
		return c.Accessor.UnmarshalBinary(reader.Bytes())
	case RLEType:
		c.Type = RLEType
		c.Accessor = NewRLEAccessor(c)
		return c.Accessor.UnmarshalBinary(reader.Bytes())
	default:
		return fmt.Errorf("Chunk.UnmarshalJSON: unsupported chunk type '%d'", c.Type)

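After these changes a freshly created chunk defaults to the RLE accessor, and the binary decoder picks an accessor from the chunk type header. A minimal round-trip sketch using only names visible in this diff (it assumes Chunk.MarshalBinary exists as the counterpart referenced by the deprecation note, and that Size must be set before marshalling so the RLE grid can be allocated):

```go
package example

import "git.kirsle.net/SketchyMaze/doodle/pkg/level"

// roundTrip is a sketch: NewChunk() now returns an RLE-backed chunk, and
// UnmarshalBinary re-selects the accessor from the chunk type header.
func roundTrip() error {
	c := level.NewChunk()
	c.Size = 128 // rle.NewGrid errors on a zero size, so set this first

	data, err := c.MarshalBinary() // assumed counterpart of UnmarshalBinary
	if err != nil {
		return err
	}

	c2 := level.NewChunk()
	c2.Size = 128
	return c2.UnmarshalBinary(data)
}
```
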
@@ -16,17 +16,24 @@ import (
// MapAccessor implements a chunk accessor by using a map of points to their
// palette indexes. This is the simplest accessor and is best for sparse chunks.
type MapAccessor struct {
	grid map[render.Point]*Swatch
	mu   sync.RWMutex
	chunk *Chunk // Pointer to parent struct, for its Size and Point
	grid  map[render.Point]*Swatch
	mu    sync.RWMutex
}

// NewMapAccessor initializes a MapAccessor.
func NewMapAccessor() *MapAccessor {
func NewMapAccessor(chunk *Chunk) *MapAccessor {
	return &MapAccessor{
		grid: map[render.Point]*Swatch{},
		chunk: chunk,
		grid:  map[render.Point]*Swatch{},
	}
}

// Reset the MapAccessor.
func (a *MapAccessor) Reset() {
	a.grid = map[render.Point]*Swatch{}
}

// Inflate the sparse swatches from their palette indexes.
func (a *MapAccessor) Inflate(pal *Palette) error {
	for point, swatch := range a.grid {

@@ -271,7 +278,7 @@ func (a *MapAccessor) UnmarshalBinary(compressed []byte) error {
	defer a.mu.Unlock()

	// New format: decompress the byte stream.
	//log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed))
	log.Debug("MapAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed))

	var reader = bytes.NewBuffer(compressed)

pkg/level/chunk_rle.go (new file, 174 lines)

@@ -0,0 +1,174 @@
package level

import (
	"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
	"git.kirsle.net/SketchyMaze/doodle/pkg/log"
	"git.kirsle.net/go/render"
)

// RLEAccessor implements a chunk accessor which stores its on-disk format using
// Run Length Encoding (RLE), but in memory behaves equivalently to the MapAccessor.
type RLEAccessor struct {
	chunk *Chunk // parent Chunk, for its Size and Point
	acc   *MapAccessor
}

// NewRLEAccessor initializes a RLEAccessor.
func NewRLEAccessor(chunk *Chunk) *RLEAccessor {
	return &RLEAccessor{
		chunk: chunk,
		acc:   NewMapAccessor(chunk),
	}
}

// Inflate the sparse swatches from their palette indexes.
func (a *RLEAccessor) Inflate(pal *Palette) error {
	return a.acc.Inflate(pal)
}

// Len returns the current size of the map, or number of pixels registered.
func (a *RLEAccessor) Len() int {
	return a.acc.Len()
}

// IterViewport returns a channel to loop over pixels in the viewport.
func (a *RLEAccessor) IterViewport(viewport render.Rect) <-chan Pixel {
	return a.acc.IterViewport(viewport)
}

// Iter returns a channel to loop over all points in this chunk.
func (a *RLEAccessor) Iter() <-chan Pixel {
	return a.acc.Iter()
}

// Get a pixel from the map.
func (a *RLEAccessor) Get(p render.Point) (*Swatch, error) {
	return a.acc.Get(p)
}

// Set a pixel on the map.
func (a *RLEAccessor) Set(p render.Point, sw *Swatch) error {
	return a.acc.Set(p, sw)
}

// Delete a pixel from the map.
func (a *RLEAccessor) Delete(p render.Point) error {
	return a.acc.Delete(p)
}

/*
MarshalBinary converts the chunk data to a binary representation.

This accessor uses Run Length Encoding (RLE) in its binary format. Starting
with the top-left pixel of this chunk, the binary format is a stream of bytes
formatted as such:

- UVarint for the palette index number (0-255), with 0xFF meaning void
- UVarint for the length of repetition of that palette index
*/
func (a *RLEAccessor) MarshalBinary() ([]byte, error) {
	// Flatten the chunk out into a full 2D array of all its points.
	var (
		size      = int(a.chunk.Size)
		grid, err = rle.NewGrid(size)
	)
	if err != nil {
		return nil, err
	}

	// Populate the dense 2D array of its pixels.
	for px := range a.Iter() {
		var (
			point    = render.NewPoint(px.X, px.Y)
			relative = RelativeCoordinate(point, a.chunk.Point, a.chunk.Size)
			ptr      = uint64(px.Swatch.Index())
		)

		// TODO: sometimes we get a -1 value in X or Y, not sure why.
		if relative.X < 0 || relative.Y < 0 {
			continue
		}
		grid[relative.Y][relative.X] = &ptr
	}

	return grid.Compress()
}

// UnmarshalBinary will decode a compressed RLEAccessor byte stream.
func (a *RLEAccessor) UnmarshalBinary(compressed []byte) error {
	a.acc.mu.Lock()
	defer a.acc.mu.Unlock()

	// New format: decompress the byte stream.
	log.Debug("RLEAccessor.Unmarshal: Reading %d bytes of compressed chunk data", len(compressed))

	grid, err := rle.NewGrid(int(a.chunk.Size))
	if err != nil {
		return err
	}

	if err := grid.Decompress(compressed); err != nil {
		return err
	}

	// Load the grid into our MapAccessor.
	a.acc.Reset()
	for y, row := range grid {
		for x, col := range row {
			if col == nil {
				continue
			}

			// TODO: x-1 to avoid the level creeping to the right every save,
			// not sure on the root cause! RLEAccessor Decompress?
			abs := FromRelativeCoordinate(render.NewPoint(x, y), a.chunk.Point, a.chunk.Size)
			abs.X -= 1
			a.acc.grid[abs] = NewSparseSwatch(int(*col))
		}
	}

	return nil
}

/*
	// Prepare the 2D grid to decompress the RLE stream into.
	var (
		size   = int(a.chunk.Size)
		_, err = rle.NewGrid(size)
		x, y, cursor int
	)
	if err != nil {
		return err
	}

	var reader = bytes.NewBuffer(compressed)

	for {
		var (
			paletteIndex, err1 = binary.ReadUvarint(reader)
			repeatCount, err2  = binary.ReadUvarint(reader)
		)

		if err1 != nil || err2 != nil {
			log.Error("reading Uvarints from compressed data: {%s, %s}", err1, err2)
			break
		}

		log.Warn("RLE index %d for %dpx", paletteIndex, repeatCount)

		for i := uint64(0); i < repeatCount; i++ {
			cursor++
			if cursor%size == 0 {
				y++
				x = 0
			} else {
				x++
			}

			point := render.NewPoint(int(x), int(y))
			if paletteIndex != 0xFF {
				a.acc.grid[point] = NewSparseSwatch(int(paletteIndex))
			}
		}
	}
*/

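The doc comment on MarshalBinary above describes the stream as pairs of Uvarints. A tiny standalone sketch of just that encoding (independent of the game's packages), writing one run and reading it back:

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// One RLE run as described above: a Uvarint palette index followed by a
// Uvarint repeat count.
func main() {
	var stream []byte
	stream = binary.AppendUvarint(stream, 5)   // palette index 5...
	stream = binary.AppendUvarint(stream, 300) // ...repeated for 300 pixels

	reader := bytes.NewBuffer(stream)
	index, _ := binary.ReadUvarint(reader)
	count, _ := binary.ReadUvarint(reader)
	fmt.Println(index, count) // 5 300
}
```
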
@@ -129,8 +129,10 @@ func TestChunker(t *testing.T) {

// Test the map chunk accessor.
func TestMapAccessor(t *testing.T) {
	a := level.NewMapAccessor()
	_ = a
	var (
		c = level.NewChunk()
		a = level.NewMapAccessor(c)
	)

	// Test action types
	var (

@@ -242,54 +244,88 @@ func TestChunkCoordinates(t *testing.T) {
	c := level.NewChunker(128)

	type testCase struct {
		In                 render.Point
		Expect             render.Point
		WorldCoordinate    render.Point
		ChunkCoordinate    render.Point
		RelativeCoordinate render.Point
	}
	tests := []testCase{
		testCase{
			In:                 render.NewPoint(0, 0),
			Expect:             render.NewPoint(0, 0),
			WorldCoordinate:    render.NewPoint(0, 0),
			ChunkCoordinate:    render.NewPoint(0, 0),
			RelativeCoordinate: render.NewPoint(0, 0),
		},
		testCase{
			In:                 render.NewPoint(128, 128),
			Expect:             render.NewPoint(0, 0),
			WorldCoordinate:    render.NewPoint(4, 8),
			ChunkCoordinate:    render.NewPoint(0, 0),
			RelativeCoordinate: render.NewPoint(4, 8),
		},
		testCase{
			In:                 render.NewPoint(1024, 128),
			Expect:             render.NewPoint(1, 0),
			WorldCoordinate:    render.NewPoint(128, 128),
			ChunkCoordinate:    render.NewPoint(1, 1),
			RelativeCoordinate: render.NewPoint(0, 0),
		},
		testCase{
			In:                 render.NewPoint(3600, 1228),
			Expect:             render.NewPoint(3, 1),
			WorldCoordinate:    render.NewPoint(130, 156),
			ChunkCoordinate:    render.NewPoint(1, 1),
			RelativeCoordinate: render.NewPoint(2, 28),
		},
		testCase{
			In:                 render.NewPoint(-100, -1),
			Expect:             render.NewPoint(-1, -1),
			WorldCoordinate:    render.NewPoint(1024, 128),
			ChunkCoordinate:    render.NewPoint(8, 1),
			RelativeCoordinate: render.NewPoint(0, 0),
		},
		testCase{
			In:                 render.NewPoint(-950, 100),
			Expect:             render.NewPoint(-1, 0),
			WorldCoordinate:    render.NewPoint(3600, 1228),
			ChunkCoordinate:    render.NewPoint(28, 9),
			RelativeCoordinate: render.NewPoint(16, 76),
		},
		testCase{
			In:                 render.NewPoint(-1001, -856),
			Expect:             render.NewPoint(-2, -1),
			WorldCoordinate:    render.NewPoint(-100, -1),
			ChunkCoordinate:    render.NewPoint(-1, -1),
			RelativeCoordinate: render.NewPoint(28, 127),
		},
		testCase{
			In:                 render.NewPoint(-3600, -4800),
			Expect:             render.NewPoint(-4, -5),
			WorldCoordinate:    render.NewPoint(-950, 100),
			ChunkCoordinate:    render.NewPoint(-8, 0),
			RelativeCoordinate: render.NewPoint(74, 100),
		},
		testCase{
			WorldCoordinate:    render.NewPoint(-1001, -856),
			ChunkCoordinate:    render.NewPoint(-8, -7),
			RelativeCoordinate: render.NewPoint(23, 40),
		},
		testCase{
			WorldCoordinate:    render.NewPoint(-3600, -4800),
			ChunkCoordinate:    render.NewPoint(-29, -38),
			RelativeCoordinate: render.NewPoint(112, 64),
		},
	}

	for _, test := range tests {
		actual := c.ChunkCoordinate(test.In)
		if actual != test.Expect {
		// Test conversion from world to chunk coordinate.
		actual := c.ChunkCoordinate(test.WorldCoordinate)
		if actual != test.ChunkCoordinate {
			t.Errorf(
				"Failed ChunkCoordinate conversion:\n"+
					"   Input: %s\n"+
					"Expected: %s\n"+
					"     Got: %s",
				test.In,
				test.Expect,
				test.WorldCoordinate,
				test.ChunkCoordinate,
				actual,
			)
		}

		// Test the relative (inside-chunk) coordinate.
		actual = level.RelativeCoordinate(test.WorldCoordinate, actual, c.Size)
		if actual != test.RelativeCoordinate {
			t.Errorf(
				"Failed RelativeCoordinate conversion:\n"+
					"   Input: %s\n"+
					"Expected: %s\n"+
					"     Got: %s",
				test.WorldCoordinate,
				test.RelativeCoordinate,
				actual,
			)
		}

@@ -325,7 +325,7 @@ func (c *Chunker) GetChunk(p render.Point) (*Chunk, bool) {

	// Hit the zipfile for it.
	if c.Zipfile != nil {
		if chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, p); err == nil {
		if chunk, err := c.ChunkFromZipfile(p); err == nil {
			// log.Debug("GetChunk(%s) cache miss, read from zip", p)
			c.SetChunk(p, chunk)       // cache it
			c.logChunkAccess(p, chunk) // for the LRU cache

@@ -605,6 +605,56 @@ func (c *Chunker) ChunkCoordinate(abs render.Point) render.Point {
	)
}

// RelativeCoordinate will translate from an absolute world coordinate, into one that
// is relative to fit inside of the chunk with the given chunk coordinate and size.
//
// Example:
//
// - With 128x128 chunks and a world coordinate of (280,-600)
// - The ChunkCoordinate would be (2,-4) which encompasses (256,-512) to (383,-639)
// - And relative inside that chunk, the pixel is at (24,)
func RelativeCoordinate(abs render.Point, chunkCoord render.Point, chunkSize uint8) render.Point {
	// Pixel coordinate offset.
	var (
		size   = int(chunkSize)
		offset = render.Point{
			X: chunkCoord.X * size,
			Y: chunkCoord.Y * size,
		}
		point = render.Point{
			X: abs.X - offset.X,
			Y: abs.Y - offset.Y,
		}
	)

	if point.X < 0 || point.Y < 0 {
		log.Error("RelativeCoordinate: X < 0! abs=%s rel=%s chunk=%s size=%d", abs, point, chunkCoord, chunkSize)
		log.Error("RelativeCoordinate(2): size=%d offset=%s point=%s", size, offset, point)
	}

	return point
}

// FromRelativeCoordinate is the inverse of RelativeCoordinate.
//
// With a chunk size of 128 and a relative coordinate like (8, 12),
// this function will return the absolute world coordinates based
// on your chunk.Point's placement in the level.
func FromRelativeCoordinate(rel render.Point, chunkCoord render.Point, chunkSize uint8) render.Point {
	var (
		size   = int(chunkSize)
		offset = render.Point{
			X: chunkCoord.X * size,
			Y: chunkCoord.Y * size,
		}
	)

	return render.Point{
		X: rel.X + offset.X,
		Y: rel.Y + offset.Y,
	}
}

// ChunkMap maps a chunk coordinate to its chunk data.
type ChunkMap map[render.Point]*Chunk

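A standalone worked example of the offset arithmetic in RelativeCoordinate and FromRelativeCoordinate, using a case that also appears in the TestRelativeCoordinates table later in this diff:

```go
package main

import "fmt"

// offset = chunkCoord * size, relative = abs - offset, and the inverse adds
// the offset back: world (143,144) in chunk (1,1) with 128px chunks.
func main() {
	const size = 128
	world := [2]int{143, 144}
	chunk := [2]int{1, 1}

	offset := [2]int{chunk[0] * size, chunk[1] * size} // (128, 128)
	rel := [2]int{world[0] - offset[0], world[1] - offset[1]}
	back := [2]int{rel[0] + offset[0], rel[1] + offset[1]}

	fmt.Println(rel, back) // [15 16] [143 144]
}
```
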
pkg/level/chunker_migrate.go (new file, 67 lines)

@@ -0,0 +1,67 @@
package level

import (
	"runtime"
	"sync"

	"git.kirsle.net/SketchyMaze/doodle/pkg/balance"
	"git.kirsle.net/SketchyMaze/doodle/pkg/log"
)

/* Functions to migrate Chunkers between different implementations. */

// OptimizeChunkerAccessors will evaluate all of the chunks of your drawing
// and possibly migrate them to a different Accessor implementation when
// saving on disk.
func (c *Chunker) OptimizeChunkerAccessors() {
	c.chunkMu.Lock()
	defer c.chunkMu.Unlock()

	log.Info("Optimizing Chunker Accessors")

	// TODO: parallelize this with goroutines
	var (
		chunks = make(chan *Chunk, len(c.Chunks))
		wg     sync.WaitGroup
	)

	for range runtime.NumCPU() {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for chunk := range chunks {
				var point = chunk.Point
				log.Warn("Chunk %s is a: %d", point, chunk.Type)

				// Upgrade all MapTypes into RLE compressed MapTypes?
				if balance.RLEBinaryChunkerEnabled {
					if chunk.Type == MapType {
						log.Info("Optimizing chunk %s accessor from Map to RLE", point)
						ma, _ := chunk.Accessor.(*MapAccessor)
						rle := NewRLEAccessor(chunk).FromMapAccessor(ma)

						c.Chunks[point].Type = RLEType
						c.Chunks[point].Accessor = rle
					}
				}
			}
		}()
	}

	// Feed it the chunks.
	for _, chunk := range c.Chunks {
		chunks <- chunk
	}

	close(chunks)
	wg.Wait()

}

// FromMapAccessor migrates from a MapAccessor to RLE.
func (a *RLEAccessor) FromMapAccessor(ma *MapAccessor) *RLEAccessor {
	return &RLEAccessor{
		chunk: a.chunk,
		acc:   ma,
	}
}

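OptimizeChunkerAccessors above drains a buffered channel with one goroutine per CPU. A generic, self-contained sketch of that worker-pool pattern (not tied to the Chunker types):

```go
package main

import (
	"fmt"
	"runtime"
	"sync"
)

// Generic sketch of the pattern used by OptimizeChunkerAccessors: workers
// range over a buffered channel of jobs, and a WaitGroup waits for them to
// finish after the channel is closed.
func main() {
	jobs := make(chan int, 16)
	var wg sync.WaitGroup

	for i := 0; i < runtime.NumCPU(); i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for job := range jobs {
				fmt.Println("processed", job)
			}
		}()
	}

	for i := 0; i < 16; i++ {
		jobs <- i
	}
	close(jobs)
	wg.Wait()
}
```
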
@@ -228,3 +228,98 @@ func TestViewportChunks(t *testing.T) {
		}
	}
}

func TestRelativeCoordinates(t *testing.T) {

	var (
		chunker = level.NewChunker(128)
	)

	type TestCase struct {
		WorldCoord     render.Point
		ChunkCoord     render.Point
		ExpectRelative render.Point
	}
	var tests = []TestCase{
		{
			WorldCoord:     render.NewPoint(4, 8),
			ExpectRelative: render.NewPoint(4, 8),
		},
		{
			WorldCoord:     render.NewPoint(128, 128),
			ExpectRelative: render.NewPoint(0, 0),
		},
		{
			WorldCoord:     render.NewPoint(143, 144),
			ExpectRelative: render.NewPoint(15, 16),
		},
		{
			WorldCoord:     render.NewPoint(-105, -86),
			ExpectRelative: render.NewPoint(23, 42),
		},
		{
			WorldCoord:     render.NewPoint(-252, 264),
			ExpectRelative: render.NewPoint(4, 8),
		},

		// These were seen breaking actual levels, at the corners of the chunk
		{
			WorldCoord:     render.NewPoint(511, 256),
			ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game
		},
		{
			WorldCoord:     render.NewPoint(511, 512),
			ChunkCoord:     render.NewPoint(4, 4),
			ExpectRelative: render.NewPoint(127, 0), // was getting -1,0 in game
		},
		{
			WorldCoord:     render.NewPoint(127, 384),
			ChunkCoord:     render.NewPoint(1, 3),
			ExpectRelative: render.NewPoint(-1, 0),
		},
	}
	for i, test := range tests {
		var (
			chunkCoord     = test.ChunkCoord
			actualRelative = level.RelativeCoordinate(
				test.WorldCoord,
				chunkCoord,
				chunker.Size,
			)
			roundTrip = level.FromRelativeCoordinate(
				actualRelative,
				chunkCoord,
				chunker.Size,
			)
		)

		// compute expected chunk coord automatically?
		if chunkCoord == render.Origin {
			chunkCoord = chunker.ChunkCoordinate(test.WorldCoord)
		}

		if actualRelative != test.ExpectRelative {
			t.Errorf("Test %d: world coord %s in chunk %s\n"+
				"Expected RelativeCoordinate() to be: %s\n"+
				"But it was: %s",
				i,
				test.WorldCoord,
				chunkCoord,
				test.ExpectRelative,
				actualRelative,
			)
		}

		if roundTrip != test.WorldCoord {
			t.Errorf("Test %d: world coord %s in chunk %s\n"+
				"Did not survive round trip! Expected: %s\n"+
				"But it was: %s",
				i,
				test.WorldCoord,
				chunkCoord,
				test.WorldCoord,
				roundTrip,
			)
		}
	}
}

@@ -2,6 +2,7 @@ package level

import (
	"archive/zip"
	"errors"
	"fmt"
	"io/ioutil"
	"regexp"

@@ -92,7 +93,7 @@ func (c *Chunker) MigrateZipfile(zf *zip.Writer) error {
	}

	// Verify that this chunk file in the old ZIP was not empty.
	chunk, err := ChunkFromZipfile(c.Zipfile, c.Layer, point)
	chunk, err := c.ChunkFromZipfile(point)
	if err == nil && chunk.Len() == 0 {
		log.Debug("Skip chunk %s (old zipfile chunk was empty)", coord)
		continue

@@ -190,11 +191,7 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error {
			data = bytes
		}
	} else {
		if json, err := c.MarshalJSON(); err != nil {
			return err
		} else {
			data = json
		}
		return errors.New("Chunk.ToZipfile: JSON chunk format no longer supported for writing")
	}

	// Write the file contents to zip whether binary or json.

@@ -208,14 +205,20 @@ func (c *Chunk) ToZipfile(zf *zip.Writer, layer int, coord render.Point) error {
}

// ChunkFromZipfile loads a chunk from a zipfile.
func ChunkFromZipfile(zf *zip.Reader, layer int, coord render.Point) (*Chunk, error) {
func (c *Chunker) ChunkFromZipfile(coord render.Point) (*Chunk, error) {
	// File names?
	var (
		zf    = c.Zipfile
		layer = c.Layer

		binfile  = fmt.Sprintf("chunks/%d/%s.bin", layer, coord)
		jsonfile = fmt.Sprintf("chunks/%d/%s.json", layer, coord)
		chunk    = NewChunk()
	)

	chunk.Point = coord
	chunk.Size = c.Size

	// Read from the new binary format.
	if file, err := zf.Open(binfile); err == nil {
		// log.Debug("Reading binary compressed chunk from %s", binfile)

@@ -226,6 +229,7 @@ func ChunkFromZipfile(zf *zip.Reader, layer int, coord render.Point) (*Chunk, er

	err = chunk.UnmarshalBinary(bin)
	if err != nil {
		log.Error("ChunkFromZipfile(%s): %s", coord, err)
		return nil, err
	}
} else if file, err := zf.Open(jsonfile); err == nil {

@@ -4,6 +4,21 @@ import "git.kirsle.net/SketchyMaze/doodle/pkg/log"

// Maintenance functions for the file format on disk.

// Vacuum runs any maintenance or migration tasks for the level at time of save.
//
// It will prune broken links between actors, or migrate internal data structures
// to optimize storage on disk of its binary data.
func (m *Level) Vacuum() error {
	if links := m.PruneLinks(); links > 0 {
		log.Debug("Vacuum: removed %d broken links between actors in this level.")
	}

	// Let the Chunker optimize accessor types.
	m.Chunker.OptimizeChunkerAccessors()

	return nil
}

// PruneLinks cleans up any Actor Links that can not be resolved in the
// level data. For example, if actors were linked in Edit Mode and one
// actor is deleted leaving a broken link.

@@ -3,6 +3,7 @@ package level

import (
	"fmt"
	"io/ioutil"
	"os"
	"runtime"
	"strings"

@@ -96,7 +97,9 @@ func (m *Level) WriteFile(filename string) error {
	m.GameVersion = branding.Version

	// Maintenance functions, clean up cruft before save.
	m.PruneLinks()
	if err := m.Vacuum(); err != nil {
		log.Error("Vacuum level %s: %s", filename, err)
	}

	bin, err := m.ToJSON()
	if err != nil {

@@ -115,7 +118,7 @@ func (m *Level) WriteFile(filename string) error {
	}

	// Desktop: write to disk.
	err = ioutil.WriteFile(filename, bin, 0644)
	err = os.WriteFile(filename, bin, 0644)
	if err != nil {
		return fmt.Errorf("level.WriteFile: %s", err)
	}

pkg/level/rle/rle.go (new file, 189 lines)

@@ -0,0 +1,189 @@
// Package rle contains support for Run-Length Encoding of level chunks.
package rle

import (
	"bytes"
	"encoding/binary"
	"errors"
	"fmt"
	"strings"

	"git.kirsle.net/SketchyMaze/doodle/pkg/log"
	"git.kirsle.net/go/render"
)

const NullColor = 0xFFFF

// Grid is a 2D array of nullable integers to store a flat bitmap of a chunk.
type Grid [][]*uint64

// NewGrid will return an initialized 2D grid of equal dimensions of the given size.
//
// The grid is indexed in [Y][X] notation, or: by row first and then column.
func NewGrid(size int) (Grid, error) {
	if size == 0 {
		return nil, errors.New("no size given for RLE Grid: the chunker was probably not initialized")
	}

	var grid = make([][]*uint64, size+1)
	for i := 0; i < size+1; i++ {
		grid[i] = make([]*uint64, size+1)
	}

	return grid, nil
}

func MustGrid(size int) Grid {
	grid, err := NewGrid(size)
	if err != nil {
		panic(err)
	}
	return grid
}

type Pixel struct {
	Point   render.Point
	Palette int
}

// Size of the grid.
func (g Grid) Size() int {
	return len(g[0])
}

// Compress the grid into a byte stream of RLE compressed data.
//
// The compressed format is a stream of:
//
// - A Uvarint for the palette index (0-255) or 0xffff (65535) for null.
// - A Uvarint for how many pixels to repeat that color.
func (g Grid) Compress() ([]byte, error) {
	log.Error("BEGIN Compress()")
	// log.Warn("Visualized:\n%s", g.Visualize())

	// Run-length encode the grid.
	var (
		compressed []byte // final result
		lastColor  uint64 // last color seen (current streak)
		runLength  uint64 // current streak for the last color
		buffering  bool   // detect end of grid

		// Flush the buffer
		flush = func() {
			// log.Info("flush: %d for %d length", lastColor, runLength)
			compressed = binary.AppendUvarint(compressed, lastColor)
			compressed = binary.AppendUvarint(compressed, runLength)
		}
	)

	for y, row := range g {
		for x, nullableIndex := range row {
			var index uint64
			if nullableIndex == nil {
				index = NullColor
			} else {
				index = *nullableIndex
			}

			// First color of the grid
			if y == 0 && x == 0 {
				// log.Info("First color @ %dx%d is %d", x, y, index)
				lastColor = index
				runLength = 1
				continue
			}

			// Buffer it until we get a change of color or EOF.
			if index != lastColor {
				// log.Info("Color %d streaks for %d until %dx%d", lastColor, runLength, x, y)
				flush()
				lastColor = index
				runLength = 1
				buffering = false
				continue
			}

			buffering = true
			runLength++
		}
	}

	// Flush the final buffer when we got to EOF on the grid.
	if buffering {
		flush()
	}

	// log.Error("RLE compressed: %v", compressed)

	return compressed, nil
}

// Decompress the RLE byte stream back into a populated 2D grid.
func (g Grid) Decompress(compressed []byte) error {
	log.Error("BEGIN Decompress()")
	// log.Warn("Visualized:\n%s", g.Visualize())

	// Prepare the 2D grid to decompress the RLE stream into.
	var (
		size         = g.Size()
		x, y, cursor int
	)

	var reader = bytes.NewBuffer(compressed)

	for {
		var (
			paletteIndexRaw, err1 = binary.ReadUvarint(reader)
			repeatCount, err2     = binary.ReadUvarint(reader)
		)

		if err1 != nil || err2 != nil {
			break
		}

		// Handle the null color.
		var paletteIndex *uint64
		if paletteIndexRaw != NullColor {
			paletteIndex = &paletteIndexRaw
		}

		// log.Warn("RLE index %v for %dpx", paletteIndexRaw, repeatCount)

		for i := uint64(0); i < repeatCount; i++ {
			cursor++
			if cursor%size == 0 {
				y++
				x = 0
			}

			point := render.NewPoint(int(x), int(y))
			if point.Y >= size || point.X >= size {
				continue
			}
			g[point.Y][point.X] = paletteIndex

			x++
		}
	}

	// log.Warn("Visualized:\n%s", g.Visualize())

	return nil
}

// Visualize the state of the 2D grid.
func (g Grid) Visualize() string {
	var lines []string
	for _, row := range g {
		var line = "["
		for _, col := range row {
			if col == nil {
				line += " "
			} else {
				line += fmt.Sprintf("%x", *col)
			}
		}
		lines = append(lines, line+"]")
	}
	return strings.Join(lines, "\n")
}

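A short usage sketch of the new rle package (it assumes the doodle module is available on the import path; note that NewGrid allocates a grid of size+1 per side, so a "128" grid holds 129x129 cells):

```go
package main

import (
	"fmt"

	"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
)

// Fill a grid with one color and compress it: a uniform grid collapses to a
// single (palette index, run length) pair of Uvarints. Then decompress into
// a fresh grid of the same size.
func main() {
	var (
		grid  = rle.MustGrid(128)
		color = uint64(5)
	)
	for y := range grid {
		for x := range grid[y] {
			grid[y][x] = &color
		}
	}

	compressed, err := grid.Compress()
	if err != nil {
		panic(err)
	}
	fmt.Printf("compressed %d cells into %d bytes\n", 129*129, len(compressed))

	grid2 := rle.MustGrid(128)
	if err := grid2.Decompress(compressed); err != nil {
		panic(err)
	}
}
```
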
pkg/level/rle/rle_test.go (new file, 43 lines)

@@ -0,0 +1,43 @@
package rle_test

import (
	"testing"

	"git.kirsle.net/SketchyMaze/doodle/pkg/level/rle"
)

func TestRLE(t *testing.T) {

	// Test a completely filled grid.
	var (
		grid  = rle.MustGrid(128)
		color = uint64(5)
	)
	for y := range grid {
		for x := range y {
			grid[y][x] = &color
		}
	}

	// Compress and decompress it.
	var (
		compressed, _ = grid.Compress()
		grid2         = rle.MustGrid(128)
	)
	grid2.Decompress(compressed)

	// Ensure our color is set everywhere.
	for y := range grid {
		for x := range y {
			if grid[y][x] != &color {
				t.Errorf("RLE compression didn't survive the round trip: %d,%d didn't save\n"+
					"  Expected: %d\n"+
					"  Actually: %v",
					x, y,
					color,
					grid[y][x],
				)
			}
		}
	}
}

@@ -420,7 +420,7 @@ func (w *Canvas) loopEditable(ev *event.State) error {
	baseColor, err := chunker.Get(cursor)
	if err != nil {
		limit = balance.FloodToolVoidLimit
		log.Warn("FloodTool: couldn't get base color at %s: %s (got %s)", cursor, err, baseColor.Color)
		log.Warn("FloodTool: couldn't get base color at %s: %s (got %+v)", cursor, err, baseColor)
	}

	// If no change, do nothing.