Convert to magefile for validation

Convert Makefile to Magefile
Add JSON validation
This commit is contained in:
Merith 2025-06-22 12:44:17 -07:00
parent 118e2f12c6
commit 1ea86db763
58 changed files with 530 additions and 54 deletions

256
magefile.go Normal file
View file

@ -0,0 +1,256 @@
//go:build mage
// +build mage
package main
import (
"archive/zip"
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"unicode/utf8"
"github.com/magefile/mage/mg"
"github.com/magefile/mage/sh"
"github.com/santhosh-tekuri/jsonschema/v5"
)
// Build configuration: output archive name, content locations, and the
// location/identity of the JSON schemas used for validation.
const (
	zipName    = "DigitalStorageTweaks.zip" // release archive produced by Package
	contentDir = "./ContentLib"             // directory of content files to validate and ship
	pluginFile = "./DigitalStorageTweaks.uplugin"
	// schemaBaseURL is the canonical URL prefix the local schema files are
	// registered under, so $ref links between schemas resolve offline.
	schemaBaseURL = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
	schemaDir     = "./schema" // local mirror of the schemas
)

// binaryExtensions lists file extensions treated as opaque binary data:
// skipped by validation (validateFile) and copied into the zip without
// line-ending conversion (addDirToZip).
var binaryExtensions = map[string]bool{
	".png": true, ".jpg": true, ".jpeg": true, ".bmp": true, ".gif": true,
	".dds": true, ".tga": true, ".psd": true, ".fbx": true, ".uasset": true,
	".umap": true,
}
// Default makes Build the target mage runs when invoked without an
// explicit target name.
var Default = Build

// Build runs Validate and then Package, serially and in that order
// (mg.SerialDeps), so packaging only happens if validation passes.
func Build() {
	mg.SerialDeps(Validate, Package)
}
// Package stages the ContentLib directory and the .uplugin file into
// one staging directory per target platform, zips all three staging
// directories into zipName, then removes the staging directories.
// NOTE(review): the copy step shells out to `cp`, so this target only
// works on hosts that provide it (Linux/macOS/CI) — confirm intended.
func Package() error {
	fmt.Println("Packaging files...")
	targets := []string{"Windows", "WindowsServer", "LinuxServer"}
	for _, target := range targets {
		if err := os.MkdirAll(target, 0755); err != nil {
			return fmt.Errorf("creating %s: %w", target, err)
		}
		if err := sh.Run("cp", "-r", contentDir, pluginFile, target+"/"); err != nil {
			return fmt.Errorf("copying to %s: %w", target, err)
		}
	}
	if err := createZip(zipName, targets...); err != nil {
		return fmt.Errorf("creating zip: %w", err)
	}
	// Staging directories are only an intermediate artifact; drop them.
	return Clean(targets...)
}
// Validate compiles the local JSON schemas and checks the .uplugin file
// plus every file under contentDir: .json files are validated against
// the CL_Recipe schema, files with known binary extensions are skipped,
// and every other file must be pure ASCII (see validateFile).
// It reports all failing files at once and returns an error if any fail.
func Validate() error {
	fmt.Println("Validating files...")
	compiler := jsonschema.NewCompiler()
	// Force fully-offline validation: any $ref not satisfied by the
	// local ./schema mirror is an error instead of a network fetch.
	compiler.LoadURL = func(url string) (io.ReadCloser, error) {
		return nil, fmt.Errorf("remote schema loading disabled: %s", url)
	}
	// Register each local schema file under its upstream URL (its $id
	// prefix) so cross-schema $ref links resolve to the local copies.
	if err := filepath.Walk(schemaDir, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() || filepath.Ext(path) != ".json" {
			return err
		}
		relPath, err := filepath.Rel(schemaDir, path)
		if err != nil {
			return err
		}
		id := schemaBaseURL + filepath.ToSlash(relPath)
		data, err := ioutil.ReadFile(path)
		if err != nil {
			return err
		}
		return compiler.AddResource(id, bytes.NewReader(data))
	}); err != nil {
		return fmt.Errorf("failed to load schemas: %w", err)
	}
	schema, err := compiler.Compile(schemaBaseURL + "CL_Recipe.json")
	if err != nil {
		return fmt.Errorf("invalid main schema: %w", err)
	}
	var failed []string
	paths := []string{pluginFile}
	// Walk errors are deliberately ignored: unreadable entries simply
	// are not collected. NOTE(review): consider surfacing this error.
	_ = filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
		if err == nil && !info.IsDir() {
			paths = append(paths, path)
		}
		return nil
	})
	// Validate every collected file, accumulating failures so one bad
	// file does not hide the rest.
	for _, path := range paths {
		if err := validateFile(path, schema); err != nil {
			failed = append(failed, fmt.Sprintf("%s: %v", path, err))
		}
	}
	if len(failed) > 0 {
		fmt.Println("Validation errors:")
		for _, msg := range failed {
			fmt.Println(" -", msg)
		}
		return fmt.Errorf("%d file(s) failed validation", len(failed))
	}
	fmt.Println("All files validated successfully.")
	return nil
}
// validateFile dispatches validation of a single file by extension:
// known binary extensions are skipped entirely, .json files are checked
// against schema, and everything else must pass the ASCII-only check.
func validateFile(path string, schema *jsonschema.Schema) error {
	switch ext := filepath.Ext(path); {
	case binaryExtensions[ext]:
		return nil // opaque binary asset; nothing to validate
	case ext == ".json":
		return validateJSON(path, schema)
	default:
		return validateEncoding(path)
	}
}
// validateJSON loads the JSON file at path, strips BOM/NUL/invalid
// bytes (sanitizeJSONBytes) and whole-line // comments, then unmarshals
// the result and validates it against schema.
func validateJSON(path string, schema *jsonschema.Schema) error {
	raw, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}
	// Drop blank lines and full-line comments; JSON strings cannot span
	// lines, so a trimmed line starting with "//" is always a comment.
	var kept [][]byte
	for _, line := range bytes.Split(sanitizeJSONBytes(raw), []byte("\n")) {
		trimmed := bytes.TrimSpace(line)
		if len(trimmed) == 0 || bytes.HasPrefix(trimmed, []byte("//")) {
			continue
		}
		kept = append(kept, line)
	}
	var doc interface{}
	if err := json.Unmarshal(bytes.Join(kept, []byte("\n")), &doc); err != nil {
		return err
	}
	return schema.Validate(doc)
}
// sanitizeJSONBytes returns a copy of data with a leading UTF-8 BOM
// removed and every NUL byte and invalid UTF-8 byte dropped; valid
// multi-byte runes are preserved unchanged.
func sanitizeJSONBytes(data []byte) []byte {
	data = bytes.TrimPrefix(data, []byte{0xEF, 0xBB, 0xBF})
	var cleaned bytes.Buffer
	for i := 0; i < len(data); {
		r, size := utf8.DecodeRune(data[i:])
		// A RuneError with size 1 marks a malformed byte; skip it (and
		// any NUL) one byte at a time so decoding can resynchronize.
		if (r == utf8.RuneError && size == 1) || r == '\x00' {
			i++
			continue
		}
		cleaned.WriteRune(r)
		i += size
	}
	return cleaned.Bytes()
}
func validateEncoding(path string) error {
data, err := ioutil.ReadFile(path)
if err != nil {
return err
}
for i := 0; i < len(data); {
if data[i] > 127 {
// DO NOT declare "size", it is unused and will cause a compiler error
r, _ := utf8.DecodeRune(data[i:])
if r == utf8.RuneError {
return fmt.Errorf("invalid UTF-8 sequence at position %d", i)
}
return fmt.Errorf("non-ASCII character %U at position %d", r, i)
}
i++
}
return nil
}
func Clean(list ...string) error {
fmt.Println("Cleaning up...")
for _, f := range list {
if err := os.RemoveAll(f); err != nil {
return fmt.Errorf("failed to remove %s: %w", f, err)
}
}
return nil
}
// createZip writes a zip archive at zipPath containing each listed
// directory (recursively, via addDirToZip).
//
// Fix: the original used `defer zipWriter.Close()`, which discards the
// Close error. zip.Writer.Close flushes the central directory; if that
// flush fails the archive on disk is corrupt, yet createZip returned
// nil. Close explicitly and propagate the error instead.
func createZip(zipPath string, dirs ...string) error {
	zipFile, err := os.Create(zipPath)
	if err != nil {
		return fmt.Errorf("creating zip file: %w", err)
	}
	defer zipFile.Close()
	zipWriter := zip.NewWriter(zipFile)
	for _, dir := range dirs {
		if err := addDirToZip(zipWriter, dir); err != nil {
			// Best-effort close; the add error is the root cause.
			zipWriter.Close()
			return fmt.Errorf("adding %s to zip: %w", dir, err)
		}
	}
	if err := zipWriter.Close(); err != nil {
		return fmt.Errorf("finalizing zip: %w", err)
	}
	return nil
}
// addDirToZip recursively adds dirPath to zipWriter. Archive entries
// are named under the directory's base name (e.g. "Windows/...") with
// forward slashes; directories get a trailing "/"; files with known
// binary extensions are streamed verbatim; all other files are loaded
// fully and rewritten with DOS (CRLF) line endings.
func addDirToZip(zipWriter *zip.Writer, dirPath string) error {
	return filepath.Walk(dirPath, func(filePath string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		relPath, err := filepath.Rel(dirPath, filePath)
		if err != nil {
			return err
		}
		// Root the archive path at the base of dirPath, and use forward
		// slashes regardless of the host OS (zip spec requirement).
		zipPath := filepath.Join(filepath.Base(dirPath), relPath)
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		header.Name = filepath.ToSlash(zipPath)
		header.Method = zip.Deflate
		if info.IsDir() {
			// Directory entries are marked by a trailing slash and have
			// no body.
			header.Name += "/"
			_, err := zipWriter.CreateHeader(header)
			return err
		}
		file, err := os.Open(filePath)
		if err != nil {
			return err
		}
		// The walk callback is its own function, so this Close fires at
		// the end of each file's visit, not at the end of the walk.
		defer file.Close()
		writer, err := zipWriter.CreateHeader(header)
		if err != nil {
			return err
		}
		if binaryExtensions[strings.ToLower(filepath.Ext(filePath))] {
			// Binary asset: copy the bytes untouched.
			_, err = io.Copy(writer, file)
			return err
		}
		// Text file: read fully, then normalize line endings to CRLF.
		content, err := ioutil.ReadAll(file)
		if err != nil {
			return err
		}
		content = convertToDOSLineEndings(content)
		_, err = writer.Write(content)
		return err
	})
}
// convertToDOSLineEndings returns content with every line break as
// CRLF. Existing CRLF pairs are preserved as-is (not doubled), bare LFs
// are expanded to CRLF, and a bare CR not followed by LF is left alone.
func convertToDOSLineEndings(content []byte) []byte {
	var out bytes.Buffer
	for i := 0; i < len(content); i++ {
		switch b := content[i]; {
		case b == '\r' && i+1 < len(content) && content[i+1] == '\n':
			// Drop this CR; the LF handled next iteration re-emits CRLF.
		case b == '\n':
			out.WriteString("\r\n")
		default:
			out.WriteByte(b)
		}
	}
	return out.Bytes()
}