//go:build mage
// +build mage

package main

import (
	"archive/zip"
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"strings"
	"unicode/utf8"

	"github.com/magefile/mage/mg"
	"github.com/magefile/mage/sh"
	"github.com/santhosh-tekuri/jsonschema/v5"
)

const (
	zipName       = "DigitalStorageTweaks.zip"
	contentDir    = "./ContentLib"
	pluginFile    = "./DigitalStorageTweaks.uplugin"
	schemaBaseURL = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
	schemaDir     = "./schema"
)

var (
	binaryExtensions = map[string]bool{
		".png": true, ".jpg": true, ".jpeg": true, ".bmp": true, ".gif": true,
		".dds": true, ".tga": true, ".psd": true, ".fbx": true, ".uasset": true,
		".umap": true,
	}

	targetPlatforms = []string{"Windows", "WindowsServer", "LinuxServer"}
)

var Default = Build

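// Build validates all content files and then packages them for every target
// platform. It is the default Mage target, so running `mage` with no
// arguments invokes it.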
func Build() {
	mg.SerialDeps(Validate, Package)
}

// Package creates distribution packages for all target platforms
func Package() error {
	fmt.Println("Packaging files...")

	if err := createPlatformDirectories(); err != nil {
		return fmt.Errorf("failed to create platform directories: %w", err)
	}

	if err := createZip(zipName, targetPlatforms...); err != nil {
		return fmt.Errorf("failed to create zip archive: %w", err)
	}

	return Clean(targetPlatforms...)
}

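// createPlatformDirectories stages a copy of the ContentLib folder and the
// .uplugin file into one directory per target platform. It shells out to
// `cp`, so it assumes an environment where that command is available.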
func createPlatformDirectories() error {
	for _, platform := range targetPlatforms {
		if err := os.MkdirAll(platform, 0755); err != nil {
			return fmt.Errorf("failed to create directory %s: %w", platform, err)
		}

		if err := sh.Run("cp", "-r", contentDir, pluginFile, platform+"/"); err != nil {
			return fmt.Errorf("failed to copy files to %s: %w", platform, err)
		}
	}
	return nil
}

// Validate checks all files for proper formatting and schema compliance
func Validate() error {
	fmt.Println("Validating files...")

	schema, err := setupSchemaValidator()
	if err != nil {
		return fmt.Errorf("schema setup failed: %w", err)
	}

	filesToValidate, err := collectFilesToValidate()
	if err != nil {
		return fmt.Errorf("failed to collect files for validation: %w", err)
	}

	validationErrors := validateFiles(filesToValidate, schema)

	if len(validationErrors) > 0 {
		logValidationErrors(validationErrors)
		return fmt.Errorf("%d file(s) failed validation", len(validationErrors))
	}

	fmt.Println("All files validated successfully.")
	return nil
}

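// setupSchemaValidator builds a JSON Schema compiler that resolves every
// schema reference from the local schema directory (remote loading is
// disabled) and compiles the ContentLib CL_Recipe schema.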
func setupSchemaValidator() (*jsonschema.Schema, error) {
	compiler := jsonschema.NewCompiler()
	compiler.LoadURL = func(url string) (io.ReadCloser, error) {
		return nil, fmt.Errorf("remote schema loading disabled: %s", url)
	}

	if err := loadLocalSchemas(compiler); err != nil {
		return nil, fmt.Errorf("failed to load schemas: %w", err)
	}

	return compiler.Compile(schemaBaseURL + "CL_Recipe.json")
}

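// loadLocalSchemas registers every .json file under schemaDir with the
// compiler, using the upstream ContentLib documentation URL as its ID so
// that $ref lookups against schemaBaseURL resolve to the local copies.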
func loadLocalSchemas(compiler *jsonschema.Compiler) error {
	return filepath.Walk(schemaDir, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() || filepath.Ext(path) != ".json" {
			return err
		}

		relPath, err := filepath.Rel(schemaDir, path)
		if err != nil {
			return err
		}

		id := schemaBaseURL + filepath.ToSlash(relPath)
		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}

		return compiler.AddResource(id, bytes.NewReader(data))
	})
}

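// collectFilesToValidate returns the .uplugin file plus every regular file
// found under the ContentLib directory.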
func collectFilesToValidate() ([]string, error) {
	var files []string
	files = append(files, pluginFile)

	err := filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
		if err == nil && !info.IsDir() {
			files = append(files, path)
		}
		return err
	})

	return files, err
}

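// validateFiles runs validateFile on each path and collects the failures as
// human-readable messages instead of stopping at the first error.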
func validateFiles(files []string, schema *jsonschema.Schema) []string {
	var validationErrors []string

	for _, file := range files {
		if err := validateFile(file, schema); err != nil {
			validationErrors = append(validationErrors, fmt.Sprintf("%s: %v", file, err))
		}
	}

	return validationErrors
}

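// logValidationErrors prints each collected validation failure on its own line.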
func logValidationErrors(errors []string) {
	fmt.Println("Validation errors:")
	for _, msg := range errors {
		fmt.Println(" -", msg)
	}
}

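// validateFile dispatches on file extension: known binary assets are skipped,
// .json files are checked against the schema, and everything else must be
// plain ASCII text.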
func validateFile(path string, schema *jsonschema.Schema) error {
	ext := filepath.Ext(path)

	if binaryExtensions[ext] {
		return nil
	}

	if ext == ".json" {
		return validateJSONFile(path, schema)
	}

	return validateTextFileEncoding(path)
}

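// validateJSONFile reads a JSON file, strips BOM and comment noise, parses
// it, and validates the result against the compiled schema.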
func validateJSONFile(path string, schema *jsonschema.Schema) error {
	cleanJSON, err := readAndCleanJSONFile(path)
	if err != nil {
		return err
	}

	var v interface{}
	if err := json.Unmarshal(cleanJSON, &v); err != nil {
		return fmt.Errorf("JSON parsing error: %w", err)
	}

	if err := schema.Validate(v); err != nil {
		return fmt.Errorf("schema validation error: %w", err)
	}

	return nil
}

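// readAndCleanJSONFile loads a JSON file and removes byte-level noise (BOM,
// NUL and invalid bytes) and //-style comment lines so the standard library
// parser will accept it.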
func readAndCleanJSONFile(path string) ([]byte, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	data = sanitizeJSONBytes(data)
	return removeJSONComments(data), nil
}

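// sanitizeJSONBytes strips a leading UTF-8 BOM and drops NUL bytes and
// invalid UTF-8 sequences, keeping only decodable runes.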
func sanitizeJSONBytes(data []byte) []byte {
	data = bytes.TrimPrefix(data, []byte{0xEF, 0xBB, 0xBF})
	var out bytes.Buffer

	for len(data) > 0 {
		r, size := utf8.DecodeRune(data)
		if r == utf8.RuneError && size == 1 || r == '\x00' {
			data = data[1:]
			continue
		}
		out.WriteRune(r)
		data = data[size:]
	}

	return out.Bytes()
}

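// removeJSONComments drops blank lines and lines whose first non-space
// characters are //, which encoding/json would otherwise reject.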
func removeJSONComments(data []byte) []byte {
	var lines [][]byte

	for _, line := range bytes.Split(data, []byte("\n")) {
		trim := bytes.TrimSpace(line)
		if !bytes.HasPrefix(trim, []byte("//")) && len(trim) > 0 {
			lines = append(lines, line)
		}
	}

	return bytes.Join(lines, []byte("\n"))
}

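// validateTextFileEncoding rejects any file containing bytes outside the
// ASCII range, reporting whether the offending sequence is invalid UTF-8 or
// simply a non-ASCII character.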
func validateTextFileEncoding(path string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return fmt.Errorf("failed to read file: %w", err)
	}

	for i := 0; i < len(data); {
		if data[i] > 127 {
			r, _ := utf8.DecodeRune(data[i:])
			if r == utf8.RuneError {
				return fmt.Errorf("invalid UTF-8 sequence at position %d", i)
			}
			return fmt.Errorf("non-ASCII character %U at position %d", r, i)
		}
		i++
	}

	return nil
}

// Clean removes temporary directories
func Clean(dirs ...string) error {
	fmt.Println("Cleaning up...")

	for _, dir := range dirs {
		if err := os.RemoveAll(dir); err != nil {
			return fmt.Errorf("failed to remove %s: %w", dir, err)
		}
	}

	return nil
}

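// createZip writes the given directories and their contents into a single
// zip archive at zipPath.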
func createZip(zipPath string, dirs ...string) error {
	zipFile, err := os.Create(zipPath)
	if err != nil {
		return fmt.Errorf("failed to create zip file: %w", err)
	}
	defer zipFile.Close()

	zipWriter := zip.NewWriter(zipFile)
	defer zipWriter.Close()

	for _, dir := range dirs {
		if err := addDirToZip(zipWriter, dir); err != nil {
			return fmt.Errorf("failed to add directory %s to zip: %w", dir, err)
		}
	}

	return nil
}

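// addDirToZip walks dirPath and adds every entry to the archive, keeping the
// directory's base name as the top-level folder inside the zip.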
func addDirToZip(zipWriter *zip.Writer, dirPath string) error {
	return filepath.Walk(dirPath, func(filePath string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		relPath, err := filepath.Rel(dirPath, filePath)
		if err != nil {
			return err
		}

		zipPath := filepath.Join(filepath.Base(dirPath), relPath)
		return addFileToZip(zipWriter, filePath, zipPath, info)
	})
}

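// addFileToZip writes a single archive entry. Directories become explicit
// folder entries, binary assets are copied verbatim, and text files have
// their line endings normalized to CRLF before being compressed.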
func addFileToZip(zipWriter *zip.Writer, filePath, zipPath string, info os.FileInfo) error {
	header, err := zip.FileInfoHeader(info)
	if err != nil {
		return err
	}

	header.Name = filepath.ToSlash(zipPath)
	header.Method = zip.Deflate

	if info.IsDir() {
		header.Name += "/"
		_, err := zipWriter.CreateHeader(header)
		return err
	}

	file, err := os.Open(filePath)
	if err != nil {
		return err
	}
	defer file.Close()

	writer, err := zipWriter.CreateHeader(header)
	if err != nil {
		return err
	}

	if binaryExtensions[strings.ToLower(filepath.Ext(filePath))] {
		_, err = io.Copy(writer, file)
		return err
	}

	content, err := io.ReadAll(file)
	if err != nil {
		return err
	}

	_, err = writer.Write(convertToDOSLineEndings(content))
	return err
}

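// convertToDOSLineEndings rewrites all line endings as CRLF, first collapsing
// any existing CRLF pairs so they are not doubled.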
func convertToDOSLineEndings(content []byte) []byte {
	normalized := bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n"))
	return bytes.ReplaceAll(normalized, []byte("\n"), []byte("\r\n"))
}