🐛 Account for file ids > 9

This commit is contained in:
Dan Jones 2024-12-09 14:34:01 -06:00
commit 495d50ba10
7 changed files with 40 additions and 37 deletions

1
.gitignore vendored
View file

@ -23,3 +23,4 @@ go.work.sum
# env file # env file
.env .env
defrag

View file

@ -1,30 +1,31 @@
package defrag package defrag
import ( import (
"bytes"
"strconv" "strconv"
"strings"
) )
func Blocks(diskMap []byte) ([]byte, error) { func Blocks(diskMap []byte) ([]int, error) {
current := 0 current := 0
onFile := false onFile := false
buff := bytes.Buffer{} out := []int{}
for _, by := range diskMap { for _, by := range diskMap {
count, err := strconv.Atoi(string(by)) count, err := strconv.Atoi(string(by))
if err != nil { if err != nil {
return nil, err return nil, err
} }
onFile = !onFile onFile = !onFile
if count == 0 { if count == 0 {
continue continue
} }
append := "." val := -1
if onFile { if onFile {
append = strconv.Itoa(current) val = current
current++ current++
} }
buff.WriteString(strings.Repeat(append, count)) for idx := 0; idx < count; idx++ {
out = append(out, val)
}
} }
return buff.Bytes(), nil return out, nil
} }

View file

@ -7,16 +7,15 @@ import (
) )
type tt struct { type tt struct {
name string name string
diskMap []byte diskMap []byte
blocks []byte blocks []int
defragged []byte
} }
func sampleTests() []tt { func sampleTests() []tt {
return []tt{ return []tt{
{"short", []byte("12345"), []byte("0..111....22222"), []byte("022111222......")}, {"short", []byte("12345"), []int{0, -1, -1, 1, 1, 1, -1, -1, -1, -1, 2, 2, 2, 2, 2}},
{"longer", []byte("2333133121414131402"), []byte("00...111...2...333.44.5555.6666.777.888899"), []byte("0099811188827773336446555566..............")}, {"longer", []byte("2333133121414131402"), []int{0, 0, -1, -1, -1, 1, 1, 1, -1, -1, -1, 2, -1, -1, -1, 3, 3, 3, -1, 4, 4, -1, 5, 5, 5, 5, -1, 6, 6, 6, 6, -1, 7, 7, 7, -1, 8, 8, 8, 8, 9, 9}},
} }
} }

14
checksum.go Normal file
View file

@ -0,0 +1,14 @@
package defrag
// Checksum returns the sum of position*fileID over every occupied block
// in the disk layout produced by Blocks/Defrag. Free blocks are encoded
// as -1 and contribute nothing.
//
// Previously this stopped at the first -1, which is only correct when
// every free block is at the tail (i.e. after a full Defrag). Skipping
// free blocks instead gives the right answer for any layout, including
// partially defragmented disks with interleaved gaps.
//
// err is always nil; the error return is kept so existing callers'
// error-handling paths continue to compile.
func Checksum(blocks []int) (sum int, err error) {
	for idx, fileID := range blocks {
		if fileID == -1 {
			// Skip free space rather than break: gaps may appear
			// before file blocks in a not-yet-defragmented layout.
			continue
		}
		sum += idx * fileID
	}
	return
}

View file

@ -29,6 +29,7 @@ func main() {
blocks, err := defrag.Blocks(input) blocks, err := defrag.Blocks(input)
handleErr(err) handleErr(err)
err = defrag.Defrag(blocks) err = defrag.Defrag(blocks)
handleErr(err) handleErr(err)
@ -36,5 +37,11 @@ func main() {
fmt.Printf("%s\n", input) fmt.Printf("%s\n", input)
fmt.Println("Got this") fmt.Println("Got this")
fmt.Printf("%s\n", blocks) fmt.Printf("%v\n", blocks)
sum, err := defrag.Checksum(blocks)
handleErr(err)
fmt.Println("Checksum:")
fmt.Printf("%d\n", sum)
} }

View file

@ -2,18 +2,18 @@ package defrag
// Defrag performs an in-place sort of blocks. // Defrag performs an in-place sort of blocks.
// If an error occurs during processing, it will be returned. The slice may have been partially sorted. // If an error occurs during processing, it will be returned. The slice may have been partially sorted.
func Defrag(blocks []byte) error { func Defrag(blocks []int) error {
lastPulled := len(blocks) lastPulled := len(blocks)
lastPushed := -1 lastPushed := -1
for block := 0; block < len(blocks); block++ { for block := 0; block < len(blocks); block++ {
if blocks[block] != '.' { if blocks[block] != -1 {
continue continue
} }
if lastPulled <= block { if lastPulled <= block {
break break
} }
for swap := lastPulled - 1; swap > lastPushed; swap-- { for swap := lastPulled - 1; swap > lastPushed; swap-- {
if blocks[swap] == '.' { if blocks[swap] == -1 {
continue continue
} }
blocks[block], blocks[swap] = blocks[swap], blocks[block] blocks[block], blocks[swap] = blocks[swap], blocks[block]

View file

@ -1,19 +0,0 @@
package defrag
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestDefrag runs Defrag against the shared sample cases from sampleTests.
// NOTE(review): this file is the *deleted* side of the commit diff — the
// commit changes blocks from []byte to []int and drops the `defragged`
// field from tt, so this test no longer compiles against the new code.
func TestDefrag(t *testing.T) {
	for _, testcase := range sampleTests() {
		t.Run(testcase.name, func(sub *testing.T) {
			// Copy first: Defrag mutates its argument in place, and the
			// fixture slice must stay pristine for other subtests.
			blocks := make([]byte, len(testcase.blocks))
			copy(blocks, testcase.blocks)
			err := Defrag(blocks)
			require.NoError(sub, err)
			require.Equal(sub, testcase.defragged, blocks)
		})
	}
}