Mirror of https://github.com/superseriousbusiness/gotosocial.git
Grand test fixup (#138)
* start fixing up tests
* fix up tests + automate with drone
* fiddle with linting
* messing about with drone.yml
* some more fiddling
* hmmm
* add cache
* add vendor directory
* verbose
* ci updates
* update some little things
* update sig
parent 329a5e8144
commit 98263a7de6
2677 changed files with 1090869 additions and 219 deletions
vendor/github.com/superseriousbusiness/exifremove/LICENSE (generated, vendored, normal file, 21 lines added)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 scott lee davis

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/superseriousbusiness/exifremove/pkg/exifremove/exifremove.go (generated, vendored, normal file, 140 lines added)
@@ -0,0 +1,140 @@
package exifremove

import (
    "bytes"
    "encoding/binary"
    "errors"
    "fmt"
    "image/jpeg"
    "image/png"

    "github.com/dsoprea/go-exif"
    jpegstructure "github.com/dsoprea/go-jpeg-image-structure"
    pngstructure "github.com/dsoprea/go-png-image-structure"
    "github.com/h2non/filetype"
)

func Remove(data []byte) ([]byte, error) {

    const (
        JpegMediaType  = "jpeg"
        PngMediaType   = "png"
        OtherMediaType = "other"
        StartBytes     = 0
        EndBytes       = 0
    )

    type MediaContext struct {
        MediaType string
        RootIfd   *exif.Ifd
        RawExif   []byte
        Media     interface{}
    }

    filtered := []byte{}

    head := make([]byte, 261)
    _, err := bytes.NewReader(data).Read(head)
    if err != nil {
        return nil, fmt.Errorf("could not read first 261 bytes of data: %s", err)
    }
    imagetype, err := filetype.Match(head)
    if err != nil {
        return nil, fmt.Errorf("error matching first 261 bytes of image to valid type: %s", err)
    }

    switch imagetype.MIME.Subtype {
    case "jpeg":
        jmp := jpegstructure.NewJpegMediaParser()
        sl, err := jmp.ParseBytes(data)
        if err != nil {
            return nil, err
        }

        _, rawExif, err := sl.Exif()
        if err != nil {
            return data, nil
        }

        startExifBytes := StartBytes
        endExifBytes := EndBytes

        if bytes.Contains(data, rawExif) {
            for i := 0; i < len(data)-len(rawExif); i++ {
                if bytes.Compare(data[i:i+len(rawExif)], rawExif) == 0 {
                    startExifBytes = i
                    endExifBytes = i + len(rawExif)
                    break
                }
            }
            fill := make([]byte, len(data[startExifBytes:endExifBytes]))
            copy(data[startExifBytes:endExifBytes], fill)
        }

        filtered = data

        _, err = jpeg.Decode(bytes.NewReader(filtered))
        if err != nil {
            return nil, errors.New("EXIF removal corrupted " + err.Error())
        }
    case "png":
        pmp := pngstructure.NewPngMediaParser()
        cs, err := pmp.ParseBytes(data)
        if err != nil {
            return nil, err
        }

        _, rawExif, err := cs.Exif()
        if err != nil {
            return data, nil
        }

        startExifBytes := StartBytes
        endExifBytes := EndBytes

        if bytes.Contains(data, rawExif) {
            for i := 0; i < len(data)-len(rawExif); i++ {
                if bytes.Compare(data[i:i+len(rawExif)], rawExif) == 0 {
                    startExifBytes = i
                    endExifBytes = i + len(rawExif)
                    break
                }
            }
            fill := make([]byte, len(data[startExifBytes:endExifBytes]))
            copy(data[startExifBytes:endExifBytes], fill)
        }

        filtered = data

        chunks := readPNGChunks(bytes.NewReader(filtered))

        for _, chunk := range chunks {
            if !chunk.CRCIsValid() {
                offset := int(chunk.Offset) + 8 + int(chunk.Length)
                crc := chunk.CalculateCRC()

                buf := new(bytes.Buffer)
                binary.Write(buf, binary.BigEndian, crc)
                crcBytes := buf.Bytes()

                copy(filtered[offset:], crcBytes)
            }
        }

        chunks = readPNGChunks(bytes.NewReader(filtered))
        for _, chunk := range chunks {
            if !chunk.CRCIsValid() {
                return nil, errors.New("EXIF removal failed CRC")
            }
        }

        _, err = png.Decode(bytes.NewReader(filtered))
        if err != nil {
            return nil, errors.New("EXIF removal corrupted " + err.Error())
        }
    default:
        return nil, errors.New("filetype not recognised")
    }

    return filtered, nil
}
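For orientation only (this snippet is not part of the commit): a minimal sketch of how the vendored exifremove.Remove function might be called by downstream code. Only the import path and the Remove signature come from the file above; the file names and error handling here are hypothetical.

package main

import (
    "fmt"
    "os"

    "github.com/superseriousbusiness/exifremove/pkg/exifremove"
)

func main() {
    // Hypothetical input file; Remove handles JPEG and PNG data.
    data, err := os.ReadFile("photo.jpg")
    if err != nil {
        fmt.Println("read failed:", err)
        return
    }

    // Remove returns the image bytes with any embedded EXIF blob zeroed out.
    cleaned, err := exifremove.Remove(data)
    if err != nil {
        fmt.Println("exif removal failed:", err)
        return
    }

    if err := os.WriteFile("photo-clean.jpg", cleaned, 0o644); err != nil {
        fmt.Println("write failed:", err)
    }
}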
vendor/github.com/superseriousbusiness/exifremove/pkg/exifremove/png_crc_fix.go (generated, vendored, normal file, 104 lines added)
@@ -0,0 +1,104 @@
package exifremove

// borrowed heavily from https://github.com/landaire/png-crc-fix/blob/master/main.go

import (
    "bytes"
    "encoding/binary"
    "fmt"
    "hash/crc32"
    "io"
    "os"
)

const chunkStartOffset = 8
const endChunk = "IEND"

type pngChunk struct {
    Offset int64
    Length uint32
    Type   [4]byte
    Data   []byte
    CRC    uint32
}

func (p pngChunk) String() string {
    return fmt.Sprintf("%s@%x - %X - Valid CRC? %v", p.Type, p.Offset, p.CRC, p.CRCIsValid())
}

func (p pngChunk) Bytes() []byte {
    var buffer bytes.Buffer

    binary.Write(&buffer, binary.BigEndian, p.Type)
    buffer.Write(p.Data)

    return buffer.Bytes()
}

func (p pngChunk) CRCIsValid() bool {
    return p.CRC == p.CalculateCRC()
}

func (p pngChunk) CalculateCRC() uint32 {
    crcTable := crc32.MakeTable(crc32.IEEE)

    return crc32.Checksum(p.Bytes(), crcTable)
}

func (p pngChunk) CRCOffset() int64 {
    return p.Offset + int64(8+p.Length)
}

func readPNGChunks(reader io.ReadSeeker) []pngChunk {
    chunks := []pngChunk{}

    reader.Seek(chunkStartOffset, os.SEEK_SET)

    readChunk := func() (*pngChunk, error) {
        var chunk pngChunk
        chunk.Offset, _ = reader.Seek(0, os.SEEK_CUR)

        binary.Read(reader, binary.BigEndian, &chunk.Length)

        chunk.Data = make([]byte, chunk.Length)

        err := binary.Read(reader, binary.BigEndian, &chunk.Type)
        if err != nil {
            goto read_error
        }

        if read, err := reader.Read(chunk.Data); read == 0 || err != nil {
            goto read_error
        }

        err = binary.Read(reader, binary.BigEndian, &chunk.CRC)
        if err != nil {
            goto read_error
        }

        return &chunk, nil

    read_error:
        return nil, fmt.Errorf("Read error")
    }

    chunk, err := readChunk()
    if err != nil {
        return chunks
    }

    chunks = append(chunks, *chunk)

    // Read the first chunk
    for string(chunks[len(chunks)-1].Type[:]) != endChunk {

        chunk, err := readChunk()
        if err != nil {
            break
        }

        chunks = append(chunks, *chunk)
    }

    return chunks
}
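A note on the CRC handling above (again, not part of the commit): per the PNG specification, a chunk's CRC is CRC-32 (IEEE polynomial) computed over the 4-byte chunk type followed by the chunk data, which is what CalculateCRC implements. A standalone sketch that checks this against the well-known IEND trailer every PNG file ends with:

package main

import (
    "fmt"
    "hash/crc32"
)

func main() {
    // The IEND chunk has zero data bytes, so its CRC is just the
    // CRC-32 (IEEE) of the chunk type "IEND".
    crc := crc32.Checksum([]byte("IEND"), crc32.MakeTable(crc32.IEEE))

    // Every valid PNG ends with 00 00 00 00 "IEND" followed by this CRC.
    fmt.Printf("%08X\n", crc) // prints AE426082
}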