schema validation
parent 0eb645d264
commit 8884c53f57
7 changed files with 445 additions and 27 deletions
magefile.go (239 changes)
@@ -4,22 +4,36 @@
 package main
 
 import (
+	"archive/zip"
+	"bytes"
+	"encoding/json"
 	"fmt"
+	"io"
 	"io/ioutil"
 	"os"
 	"path/filepath"
-	"unicode"
+	"strings"
+	"unicode/utf8"
 
 	"github.com/magefile/mage/mg"
 	"github.com/magefile/mage/sh"
-	"github.com/xeipuuv/gojsonschema"
+	"github.com/santhosh-tekuri/jsonschema/v5"
 )
 
 const (
 	zipName    = "DigitalStorageTweaks.zip"
 	contentDir = "./ContentLib"
 	pluginFile = "./DigitalStorageTweaks.uplugin"
+	schemaPath = "./schema/CL_Recipe.json" // Local schema path
 )
 
+var (
+	// Binary file extensions to skip line ending conversion
+	binaryExtensions = map[string]bool{
+		".png": true, ".jpg": true, ".jpeg": true, ".bmp": true, ".gif": true,
+		".dds": true, ".tga": true, ".psd": true, ".fbx": true, ".uasset": true,
+		".umap": true,
+	}
+)
+
 // Default target
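The "// Default target" line closing this hunk refers to mage's target convention: exported functions in a mage-tagged file become CLI targets, and a Default variable selects the one a bare `mage` invocation runs. A minimal standalone magefile sketch of that convention (the Build target and its messages are illustrative, not targets from this repository):

//go:build mage

// Minimal magefile sketch: exported funcs are targets, Default picks the
// target run by a bare `mage`, mg.Deps declares prerequisite targets.
package main

import (
	"fmt"

	"github.com/magefile/mage/mg"
)

// Default target executed when `mage` is run with no arguments.
var Default = Build

// Build runs Validate first, then pretends to build.
func Build() error {
	mg.Deps(Validate)
	fmt.Println("building...")
	return nil
}

// Validate pretends to check content files.
func Validate() error {
	fmt.Println("validating...")
	return nil
}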
@@ -44,57 +58,125 @@ func Package() error {
 		}
 	}
 
-	// Create zip
-	if err := sh.Run("7z", "a", "-r", zipName, "Windows/", "LinuxServer/", "WindowsServer/"); err != nil {
+	// Create zip file
+	if err := createZip(zipName, "Windows", "WindowsServer", "LinuxServer"); err != nil {
 		return fmt.Errorf("creating zip: %w", err)
 	}
 
 	// Clean temp dirs
 	return Clean("Windows", "WindowsServer", "LinuxServer")
 }
 
 // Validate checks all content files
 func Validate() error {
 	fmt.Println("Validating files...")
 
-	// Check plugin file
-	if err := checkFile(pluginFile); err != nil {
+	const (
+		baseSchemaURL  = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
+		localSchemaDir = "./schema"
+	)
+
+	compiler := jsonschema.NewCompiler()
+
+	// Disable remote fetching
+	compiler.LoadURL = func(url string) (io.ReadCloser, error) {
+		return nil, fmt.Errorf("remote schema loading disabled: %s", url)
+	}
+
+	// Walk through the schema directory and add all .json files
+	err := filepath.Walk(localSchemaDir, func(path string, info os.FileInfo, err error) error {
+		if err != nil || info.IsDir() {
+			return err
+		}
+
+		if filepath.Ext(path) != ".json" {
+			return nil
+		}
+
+		relPath, err := filepath.Rel(localSchemaDir, path)
+		if err != nil {
+			return fmt.Errorf("resolving relative path: %w", err)
+		}
+
+		// Build the full schema ID
+		schemaID := baseSchemaURL + filepath.ToSlash(relPath)
+
+		data, err := ioutil.ReadFile(path)
+		if err != nil {
+			return fmt.Errorf("reading %s: %w", path, err)
+		}
+
+		err = compiler.AddResource(schemaID, bytes.NewReader(data))
+		if err != nil {
+			return fmt.Errorf("adding schema %s: %w", schemaID, err)
+		}
+
+		return nil
+	})
+	if err != nil {
+		return fmt.Errorf("failed to load schemas: %w", err)
+	}
+
+	// Main schema (must match its $id)
+	mainSchemaID := baseSchemaURL + "CL_Recipe.json"
+
+	// Compile the main schema
+	schema, err := compiler.Compile(mainSchemaID)
+	if err != nil {
+		return fmt.Errorf("invalid main schema: %w", err)
+	}
+
+	// Validate plugin file
+	if err := validateFile(pluginFile, schema); err != nil {
 		return fmt.Errorf("plugin file: %w", err)
 	}
 
-	// Check content directory
+	// Validate all JSON files in the content directory
 	return filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
 		if err != nil || info.IsDir() {
 			return err
 		}
-		return checkFile(path)
+		return validateFile(path, schema)
 	})
 }
 
-func checkFile(path string) error {
+func validateFile(path string, schema *jsonschema.Schema) error {
 	// Skip binary files
-	switch filepath.Ext(path) {
-	case ".png", ".jpg", ".jpeg", ".bmp", ".gif", ".dds", ".tga", ".psd", ".fbx", ".uasset", ".umap":
+	ext := filepath.Ext(path)
+	if binaryExtensions[ext] {
 		return nil
-	case ".json":
-		return validateJSON(path)
-	default:
-		return validateEncoding(path)
 	}
+
+	if ext == ".json" {
+		return validateJSON(path, schema)
+	}
+	return validateEncoding(path)
 }
 
-func validateJSON(path string) error {
-	schemaLoader := gojsonschema.NewReferenceLoader("https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/CL_Recipe.json")
-	documentLoader := gojsonschema.NewReferenceLoader("file:///" + filepath.ToSlash(path))
-
-	result, err := gojsonschema.Validate(schemaLoader, documentLoader)
+func validateJSON(path string, schema *jsonschema.Schema) error {
+	content, err := ioutil.ReadFile(path)
 	if err != nil {
-		return fmt.Errorf("schema validation failed: %w", err)
+		return fmt.Errorf("failed to read JSON file: %w", err)
 	}
 
-	if !result.Valid() {
-		return fmt.Errorf("invalid JSON schema: %v", result.Errors())
+	// Remove comment lines (starting with //)
+	var filteredContent []byte
+	for _, line := range bytes.Split(content, []byte("\n")) {
+		trimmed := bytes.TrimSpace(line)
+		if !bytes.HasPrefix(trimmed, []byte("//")) && len(trimmed) > 0 {
+			filteredContent = append(filteredContent, line...)
+			filteredContent = append(filteredContent, '\n')
+		}
+	}
+
+	// Validate JSON
+	var v interface{}
+	if err := json.Unmarshal(filteredContent, &v); err != nil {
+		return fmt.Errorf("invalid JSON in %s: %w", path, err)
 	}
+
+	if err := schema.Validate(v); err != nil {
+		return fmt.Errorf("schema validation failed for %s: %w", path, err)
+	}
+
 	return nil
 }
 
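The Validate target above registers every schema under ./schema with the compiler under the URL it would normally be fetched from, so $ref resolution stays on disk even though remote loading is disabled. For reference, a minimal standalone sketch of that compile-and-validate flow with github.com/santhosh-tekuri/jsonschema/v5; the schema ID, schema body, and documents below are made-up examples rather than files from this repository:

package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	// Register the schema under the ID its $id/$ref would use, then compile
	// that same ID; no network access is needed because the resource is
	// already known to the compiler.
	const schemaID = "https://example.com/schemas/recipe.json" // illustrative ID
	const schemaDoc = `{
		"type": "object",
		"required": ["Name", "Ingredients"],
		"properties": {
			"Name":        {"type": "string"},
			"Ingredients": {"type": "array"}
		}
	}`

	compiler := jsonschema.NewCompiler()
	if err := compiler.AddResource(schemaID, strings.NewReader(schemaDoc)); err != nil {
		panic(err)
	}
	schema, err := compiler.Compile(schemaID)
	if err != nil {
		panic(err)
	}

	// Documents are unmarshalled to interface{} before Schema.Validate.
	var good interface{}
	if err := json.Unmarshal([]byte(`{"Name": "Example", "Ingredients": []}`), &good); err != nil {
		panic(err)
	}
	fmt.Println("valid document:", schema.Validate(good)) // <nil>

	var bad interface{}
	_ = json.Unmarshal([]byte(`{"Name": "Example"}`), &bad)
	fmt.Println("missing field rejected:", schema.Validate(bad) != nil) // true
}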
@@ -105,11 +187,16 @@ func validateEncoding(path string) error {
 	}
 
 	// Check for non-ASCII
-	for i := 0; i < len(content); i++ {
-		if content[i] > unicode.MaxASCII {
+	for i := 0; i < len(content); {
+		b := content[i]
+		if b > 127 { // Non-ASCII
+			r, _ := utf8.DecodeRune(content[i:])
+			if r == utf8.RuneError {
+				return fmt.Errorf("invalid UTF-8 sequence at position %d", i)
+			}
+			return fmt.Errorf("non-ASCII character %U at position %d", r, i)
 		}
+		i++
 	}
 	return nil
 }
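The reworked loop decodes the first byte above 0x7F as a rune so the error can name the offending character, or flag an invalid UTF-8 sequence. A self-contained sketch of the same check; firstNonASCII and the sample strings are illustrative, not part of the magefile:

package main

import (
	"fmt"
	"unicode/utf8"
)

// firstNonASCII reports the first byte outside the ASCII range, decoded as a
// rune when the surrounding bytes form valid UTF-8.
func firstNonASCII(content []byte) error {
	for i := 0; i < len(content); i++ {
		if content[i] > 127 {
			r, _ := utf8.DecodeRune(content[i:])
			if r == utf8.RuneError {
				return fmt.Errorf("invalid UTF-8 sequence at position %d", i)
			}
			return fmt.Errorf("non-ASCII character %U at position %d", r, i)
		}
	}
	return nil
}

func main() {
	fmt.Println(firstNonASCII([]byte("plain ascii"))) // <nil>
	fmt.Println(firstNonASCII([]byte("naïve")))       // non-ASCII character U+00EF at position 2
}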
@@ -124,3 +211,101 @@ func Clean(list ...string) error {
 	}
 	return nil
 }
+
+// createZip creates a zip file from the specified directories
+func createZip(zipPath string, dirs ...string) error {
+	// Create zip file
+	zipFile, err := os.Create(zipPath)
+	if err != nil {
+		return fmt.Errorf("creating zip file: %w", err)
+	}
+	defer zipFile.Close()
+
+	// Create zip writer
+	zipWriter := zip.NewWriter(zipFile)
+	defer zipWriter.Close()
+
+	// Add each directory to the zip
+	for _, dir := range dirs {
+		if err := addDirToZip(zipWriter, dir); err != nil {
+			return fmt.Errorf("adding %s to zip: %w", dir, err)
+		}
+	}
+
+	return nil
+}
+
+// addDirToZip adds a directory to the zip, converting text files to DOS line endings
+func addDirToZip(zipWriter *zip.Writer, dirPath string) error {
+	return filepath.Walk(dirPath, func(filePath string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+
+		// Create relative path for the zip file
+		relPath, err := filepath.Rel(dirPath, filePath)
+		if err != nil {
+			return err
+		}
+		zipPath := filepath.Join(filepath.Base(dirPath), relPath)
+
+		// Create zip file header
+		header, err := zip.FileInfoHeader(info)
+		if err != nil {
+			return err
+		}
+		header.Name = filepath.ToSlash(zipPath) // Use forward slashes for zip compatibility
+
+		// Use compression for all files
+		header.Method = zip.Deflate
+
+		// Handle directories
+		if info.IsDir() {
+			header.Name += "/"
+			_, err := zipWriter.CreateHeader(header)
+			return err
+		}
+
+		// Open source file
+		file, err := os.Open(filePath)
+		if err != nil {
+			return err
+		}
+		defer file.Close()
+
+		// Create writer in zip
+		writer, err := zipWriter.CreateHeader(header)
+		if err != nil {
+			return err
+		}
+
+		// Process based on file type
+		ext := strings.ToLower(filepath.Ext(filePath))
+		if binaryExtensions[ext] {
+			// Binary file - copy directly
+			_, err = io.Copy(writer, file)
+			return err
+		} else {
+			// Text file - read content and convert line endings
+			content, err := ioutil.ReadAll(file)
+			if err != nil {
+				return err
+			}
+
+			// Convert LF to CRLF
+			content = convertToDOSLineEndings(content)
+
+			// Write to zip
+			_, err = writer.Write(content)
+			return err
+		}
+	})
+}
+
+// convertToDOSLineEndings converts LF to CRLF while preserving existing CRLF
+func convertToDOSLineEndings(content []byte) []byte {
+	// First normalize to LF
+	normalized := bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n"))
+	// Then convert to CRLF
+	return bytes.ReplaceAll(normalized, []byte("\n"), []byte("\r\n"))
+}
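convertToDOSLineEndings normalizes CRLF to LF before expanding LF to CRLF, so text files that already use DOS line endings come through unchanged instead of gaining stray carriage returns. A quick standalone check of that behaviour; toDOS and the sample inputs are illustrative, mirroring the function above:

package main

import (
	"bytes"
	"fmt"
)

// toDOS mirrors convertToDOSLineEndings: collapse CRLF to LF, then expand
// every LF to CRLF, which makes the conversion idempotent.
func toDOS(content []byte) []byte {
	normalized := bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n"))
	return bytes.ReplaceAll(normalized, []byte("\n"), []byte("\r\n"))
}

func main() {
	fmt.Printf("%q\n", toDOS([]byte("a\nb\r\nc\n"))) // "a\r\nb\r\nc\r\n"
	fmt.Printf("%q\n", toDOS(toDOS([]byte("a\n"))))  // still "a\r\n": applying it twice changes nothing
}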