Compare commits
2 commits: 8884c53f57 ... 222615a18a

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 222615a18a |  |
|  | 1ea86db763 |  |

25 changed files with 107 additions and 171 deletions
Binary file not shown.
Binary file not shown.
Binary file not shown.
(File headers for the recipe diffs below were lost in this capture; each hunk belongs to a different ContentLib recipe JSON file.)

```diff
@@ -34,7 +34,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_16K_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_1K_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,
```
Binary file not shown.
Binary file not shown.
Binary file not shown.
```diff
@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schem_DS_T3_2_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_2K_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -34,7 +34,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_32K_C"
   ],
-  "ManualManufacturingDuration": 60.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,
```
Binary file not shown.
Binary file not shown.
```diff
@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_4K_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_512_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,
```
Binary file not shown.
Binary file not shown.
```diff
@@ -38,7 +38,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_64K_C"
   ],
-  "ManualManufacturingDuration": 60.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -30,7 +30,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Drives_8K_C"
   ],
-  "ManualManufacturingDuration": 45.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -30,7 +30,7 @@
   "UnlockedBy": [
     "Schematic_DS_Mam_Adapters_Universal_C"
   ],
-  "ManualManufacturingDuration": 1.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -26,7 +26,7 @@
   "UnlockedBy": [
     "Schem_DS_T3_1_C"
   ],
-  "ManualManufacturingDuration": 5.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,
```

The following hunk appears in two separate recipe files, both unlocked by the same schematic:

```diff
@@ -30,7 +30,7 @@
   "UnlockedBy": [
     "Schem_DS_T6_2_C"
   ],
-  "ManualManufacturingDuration": 1.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,

@@ -30,7 +30,7 @@
   "UnlockedBy": [
     "Schem_DS_T6_2_C"
   ],
-  "ManualManufacturingDuration": 1.0,
+  "ManualManufacturingMultiplier": 0.0,
   "VariablePowerConsumptionConstant": 0.0,
   "VariablePowerConsumptionFactor": 1.0,
   "ClearIngredients": true,
```
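Each recipe hunk above makes the same one-line edit: the fixed `ManualManufacturingDuration` is replaced by `ManualManufacturingMultiplier: 0.0`. For orientation, here is a Go sketch of just the fields visible in these hunks; it is an illustrative model, not the full ContentLib `CL_Recipe` schema:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// recipePatch models only the fields that appear in the hunks above.
// The real CL_Recipe schema defines many more fields; this struct is
// an assumption for illustration, not part of the mod.
type recipePatch struct {
	UnlockedBy                       []string `json:"UnlockedBy"`
	ManualManufacturingMultiplier    float64  `json:"ManualManufacturingMultiplier"`
	VariablePowerConsumptionConstant float64  `json:"VariablePowerConsumptionConstant"`
	VariablePowerConsumptionFactor   float64  `json:"VariablePowerConsumptionFactor"`
	ClearIngredients                 bool     `json:"ClearIngredients"`
}

func main() {
	p := recipePatch{
		UnlockedBy:                     []string{"Schematic_DS_Mam_Drives_16K_C"},
		VariablePowerConsumptionFactor: 1.0,
		ClearIngredients:               true,
	}
	out, _ := json.MarshalIndent(p, "", "  ")
	fmt.Println(string(out))
}
```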
magefile.go: 221 changed lines
```diff
@@ -24,31 +24,24 @@ const (
 	zipName    = "DigitalStorageTweaks.zip"
 	contentDir = "./ContentLib"
 	pluginFile = "./DigitalStorageTweaks.uplugin"
-	schemaPath = "./schema/CL_Recipe.json" // Local schema path
+	schemaBaseURL = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
+	schemaDir     = "./schema"
 )
 
-var (
-	// Binary file extensions to skip line ending conversion
-	binaryExtensions = map[string]bool{
+var binaryExtensions = map[string]bool{
 	".png": true, ".jpg": true, ".jpeg": true, ".bmp": true, ".gif": true,
 	".dds": true, ".tga": true, ".psd": true, ".fbx": true, ".uasset": true,
 	".umap": true,
 }
-)
 
-// Default target
 var Default = Build
 
-// Build runs the full pipeline
 func Build() {
 	mg.SerialDeps(Validate, Package)
 }
 
-// Package creates the distribution zip
 func Package() error {
 	fmt.Println("Packaging files...")
 
-	// Create target directories
 	for _, dir := range []string{"Windows", "WindowsServer", "LinuxServer"} {
 		if err := os.MkdirAll(dir, 0755); err != nil {
 			return fmt.Errorf("creating %s: %w", dir, err)
```
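This first hunk moves the schema location out of a single hard-coded `schemaPath` and into two constants, `schemaBaseURL` and `schemaDir`, so every local schema file can be registered under the `$id` it carries upstream. A minimal sketch of that path-to-ID mapping, using only the standard library (the second file name is invented for illustration):

```go
package main

import (
	"fmt"
	"path/filepath"
)

const (
	schemaBaseURL = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
	schemaDir     = "./schema"
)

func main() {
	// CL_Recipe.json is the schema the magefile compiles; the nested
	// file is a hypothetical example of a second schema.
	for _, path := range []string{
		"./schema/CL_Recipe.json",
		"./schema/sub/Common.json",
	} {
		rel, err := filepath.Rel(schemaDir, path)
		if err != nil {
			panic(err)
		}
		// ToSlash keeps the ID stable on Windows, where Rel returns backslashes.
		fmt.Println(schemaBaseURL + filepath.ToSlash(rel))
	}
}
```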
```diff
@@ -57,140 +50,126 @@ func Package() error {
 			return fmt.Errorf("copying to %s: %w", dir, err)
 		}
 	}
 
-	// Create zip file
 	if err := createZip(zipName, "Windows", "WindowsServer", "LinuxServer"); err != nil {
 		return fmt.Errorf("creating zip: %w", err)
 	}
 
-	// Clean temp dirs
 	return Clean("Windows", "WindowsServer", "LinuxServer")
 }
 
 func Validate() error {
 	fmt.Println("Validating files...")
 
-	const (
-		baseSchemaURL  = "https://raw.githubusercontent.com/budak7273/ContentLib_Documentation/main/JsonSchemas/"
-		localSchemaDir = "./schema"
-	)
-
 	compiler := jsonschema.NewCompiler()
 
-	// Disable remote fetching
 	compiler.LoadURL = func(url string) (io.ReadCloser, error) {
 		return nil, fmt.Errorf("remote schema loading disabled: %s", url)
 	}
 
-	// Walk through the schema directory and add all .json files
-	err := filepath.Walk(localSchemaDir, func(path string, info os.FileInfo, err error) error {
-		if err != nil || info.IsDir() {
+	if err := filepath.Walk(schemaDir, func(path string, info os.FileInfo, err error) error {
+		if err != nil || info.IsDir() || filepath.Ext(path) != ".json" {
 			return err
 		}
-
-		if filepath.Ext(path) != ".json" {
-			return nil
-		}
-
-		relPath, err := filepath.Rel(localSchemaDir, path)
+		relPath, err := filepath.Rel(schemaDir, path)
 		if err != nil {
-			return fmt.Errorf("resolving relative path: %w", err)
+			return err
 		}
-
-		// Build the full schema ID
-		schemaID := baseSchemaURL + filepath.ToSlash(relPath)
-
+		id := schemaBaseURL + filepath.ToSlash(relPath)
 		data, err := ioutil.ReadFile(path)
 		if err != nil {
-			return fmt.Errorf("reading %s: %w", path, err)
+			return err
 		}
-
-		err = compiler.AddResource(schemaID, bytes.NewReader(data))
-		if err != nil {
-			return fmt.Errorf("adding schema %s: %w", schemaID, err)
-		}
-
-		return nil
-	})
-	if err != nil {
+		return compiler.AddResource(id, bytes.NewReader(data))
+	}); err != nil {
 		return fmt.Errorf("failed to load schemas: %w", err)
 	}
 
-	mainSchemaID := baseSchemaURL + "CL_Recipe.json"
-
-	// Compile the main schema
-	schema, err := compiler.Compile(mainSchemaID)
+	// Main schema (must match its $id)
+	schema, err := compiler.Compile(schemaBaseURL + "CL_Recipe.json")
 	if err != nil {
 		return fmt.Errorf("invalid main schema: %w", err)
 	}
 
-	// Validate plugin file
-	if err := validateFile(pluginFile, schema); err != nil {
-		return fmt.Errorf("plugin file: %w", err)
-	}
+	var failed []string
+	paths := []string{pluginFile}
+	_ = filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
+		if err == nil && !info.IsDir() {
+			paths = append(paths, path)
+		}
+		return nil
+	})
+
+	for _, path := range paths {
+		if err := validateFile(path, schema); err != nil {
+			failed = append(failed, fmt.Sprintf("%s: %v", path, err))
+		}
+	}
 
-	// Validate all JSON files in the content directory
-	return filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
-		if err != nil || info.IsDir() {
-			return err
-		}
-		return validateFile(path, schema)
-	})
+	if len(failed) > 0 {
+		fmt.Println("Validation errors:")
+		for _, msg := range failed {
+			fmt.Println(" -", msg)
+		}
+		return fmt.Errorf("%d file(s) failed validation", len(failed))
+	}
+
+	fmt.Println("All files validated successfully.")
+	return nil
 }
 
 func validateFile(path string, schema *jsonschema.Schema) error {
-	// Skip binary files
-	ext := filepath.Ext(path)
-	if binaryExtensions[ext] {
+	if binaryExtensions[filepath.Ext(path)] {
 		return nil
 	}
-	if ext == ".json" {
+	if filepath.Ext(path) == ".json" {
 		return validateJSON(path, schema)
 	}
 	return validateEncoding(path)
 }
 
 func validateJSON(path string, schema *jsonschema.Schema) error {
-	content, err := ioutil.ReadFile(path)
+	data, err := ioutil.ReadFile(path)
 	if err != nil {
-		return fmt.Errorf("failed to read JSON file: %w", err)
+		return err
 	}
+	data = sanitizeJSONBytes(data)
 
-	// Remove comment lines (starting with //)
-	var filteredContent []byte
-	for _, line := range bytes.Split(content, []byte("\n")) {
-		trimmed := bytes.TrimSpace(line)
-		if !bytes.HasPrefix(trimmed, []byte("//")) && len(trimmed) > 0 {
-			filteredContent = append(filteredContent, line...)
-			filteredContent = append(filteredContent, '\n')
+	var lines [][]byte
+	for _, line := range bytes.Split(data, []byte("\n")) {
+		trim := bytes.TrimSpace(line)
+		if !bytes.HasPrefix(trim, []byte("//")) && len(trim) > 0 {
+			lines = append(lines, line)
 		}
 	}
+	clean := bytes.Join(lines, []byte("\n"))
 
-	// Validate JSON
 	var v interface{}
-	if err := json.Unmarshal(filteredContent, &v); err != nil {
-		return fmt.Errorf("invalid JSON in %s: %w", path, err)
+	if err := json.Unmarshal(clean, &v); err != nil {
+		return err
 	}
-
-	if err := schema.Validate(v); err != nil {
-		return fmt.Errorf("schema validation failed for %s: %w", path, err)
-	}
-
-	return nil
+	return schema.Validate(v)
+}
+
+func sanitizeJSONBytes(data []byte) []byte {
+	data = bytes.TrimPrefix(data, []byte{0xEF, 0xBB, 0xBF})
+	var out bytes.Buffer
+	for len(data) > 0 {
+		r, size := utf8.DecodeRune(data)
+		if r == utf8.RuneError && size == 1 || r == '\x00' {
+			data = data[1:]
+			continue
+		}
+		out.WriteRune(r)
+		data = data[size:]
+	}
+	return out.Bytes()
 }
 
 func validateEncoding(path string) error {
-	content, err := ioutil.ReadFile(path)
+	data, err := ioutil.ReadFile(path)
 	if err != nil {
 		return err
 	}
-
-	// Check for non-ASCII
-	for i := 0; i < len(content); {
-		b := content[i]
-		if b > 127 { // Non-ASCII
-			r, _ := utf8.DecodeRune(content[i:])
+	for i := 0; i < len(data); {
+		if data[i] > 127 {
+			// DO NOT declare "size", it is unused and will cause a compiler error
+			r, _ := utf8.DecodeRune(data[i:])
 			if r == utf8.RuneError {
 				return fmt.Errorf("invalid UTF-8 sequence at position %d", i)
 			}
```
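The core behavioral change in `Validate` above: instead of returning on the first bad file, it gathers the plugin file plus everything under `ContentLib` into one list, validates each entry, and reports every failure at once. Here is the same aggregation pattern in isolation, with a stub validator standing in for the real schema check (a sketch, not the repository's code):

```go
package main

import (
	"fmt"
	"strings"
)

// validate is a stand-in for validateFile; it fails on names containing "bad".
func validate(path string) error {
	if strings.Contains(path, "bad") {
		return fmt.Errorf("does not match schema")
	}
	return nil
}

func main() {
	paths := []string{"a.json", "bad1.json", "b.json", "bad2.json"}

	// Collect every failure instead of returning on the first one,
	// so a single run surfaces all broken files.
	var failed []string
	for _, p := range paths {
		if err := validate(p); err != nil {
			failed = append(failed, fmt.Sprintf("%s: %v", p, err))
		}
	}
	if len(failed) > 0 {
		fmt.Println("Validation errors:")
		for _, msg := range failed {
			fmt.Println(" -", msg)
		}
		fmt.Printf("%d file(s) failed validation\n", len(failed))
		return
	}
	fmt.Println("All files validated successfully.")
}
```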
```diff
@@ -201,7 +180,6 @@ func validateEncoding(path string) error {
 	return nil
 }
 
-// Clean removes all build artifacts
 func Clean(list ...string) error {
 	fmt.Println("Cleaning up...")
 	for _, f := range list {
```
```diff
@@ -212,100 +190,67 @@ func Clean(list ...string) error {
 	return nil
 }
 
-// createZip creates a zip file from the specified directories
 func createZip(zipPath string, dirs ...string) error {
-	// Create zip file
 	zipFile, err := os.Create(zipPath)
 	if err != nil {
 		return fmt.Errorf("creating zip file: %w", err)
 	}
 	defer zipFile.Close()
 
-	// Create zip writer
 	zipWriter := zip.NewWriter(zipFile)
 	defer zipWriter.Close()
 
-	// Add each directory to the zip
 	for _, dir := range dirs {
 		if err := addDirToZip(zipWriter, dir); err != nil {
 			return fmt.Errorf("adding %s to zip: %w", dir, err)
 		}
 	}
 
 	return nil
 }
 
-// addDirToZip adds a directory to the zip, converting text files to DOS line endings
 func addDirToZip(zipWriter *zip.Writer, dirPath string) error {
 	return filepath.Walk(dirPath, func(filePath string, info os.FileInfo, err error) error {
 		if err != nil {
 			return err
 		}
 
-		// Create relative path for the zip file
 		relPath, err := filepath.Rel(dirPath, filePath)
 		if err != nil {
 			return err
 		}
 		zipPath := filepath.Join(filepath.Base(dirPath), relPath)
 
-		// Create zip file header
 		header, err := zip.FileInfoHeader(info)
 		if err != nil {
 			return err
 		}
-		header.Name = filepath.ToSlash(zipPath) // Use forward slashes for zip compatibility
+		header.Name = filepath.ToSlash(zipPath)
 
-		// Use compression for all files
 		header.Method = zip.Deflate
 
-		// Handle directories
 		if info.IsDir() {
 			header.Name += "/"
 			_, err := zipWriter.CreateHeader(header)
 			return err
 		}
 
-		// Open source file
 		file, err := os.Open(filePath)
 		if err != nil {
 			return err
 		}
 		defer file.Close()
 
-		// Create writer in zip
 		writer, err := zipWriter.CreateHeader(header)
 		if err != nil {
 			return err
 		}
 
-		// Process based on file type
-		ext := strings.ToLower(filepath.Ext(filePath))
-		if binaryExtensions[ext] {
-			// Binary file - copy directly
+		if binaryExtensions[strings.ToLower(filepath.Ext(filePath))] {
 			_, err = io.Copy(writer, file)
 			return err
-		} else {
-			// Text file - read content and convert line endings
+		}
+
 		content, err := ioutil.ReadAll(file)
 		if err != nil {
 			return err
 		}
 
-		// Convert LF to CRLF
 		content = convertToDOSLineEndings(content)
 
-		// Write to zip
 		_, err = writer.Write(content)
 		return err
-		}
 	})
 }
 
-// convertToDOSLineEndings converts LF to CRLF while preserving existing CRLF
 func convertToDOSLineEndings(content []byte) []byte {
-	// First normalize to LF
 	normalized := bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n"))
-	// Then convert to CRLF
 	return bytes.ReplaceAll(normalized, []byte("\n"), []byte("\r\n"))
}
```
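`convertToDOSLineEndings`, unchanged at the end of this hunk, first collapses CRLF to LF and only then expands LF to CRLF. That ordering makes it idempotent: input that already uses CRLF passes through unchanged rather than picking up a stray `\r\r\n`. A quick check:

```go
package main

import (
	"bytes"
	"fmt"
)

// Same two-step normalization as the magefile: CRLF -> LF, then LF -> CRLF.
func convertToDOSLineEndings(content []byte) []byte {
	normalized := bytes.ReplaceAll(content, []byte("\r\n"), []byte("\n"))
	return bytes.ReplaceAll(normalized, []byte("\n"), []byte("\r\n"))
}

func main() {
	mixed := []byte("a\nb\r\nc\n")
	once := convertToDOSLineEndings(mixed)
	twice := convertToDOSLineEndings(once)
	fmt.Printf("%q\n", once)              // "a\r\nb\r\nc\r\n"
	fmt.Println(bytes.Equal(once, twice)) // true: no \r\r\n on repeated runs
}
```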
makefile: 9 changed lines (file deleted)
```diff
@@ -1,9 +0,0 @@
-ZIP_NAME = DigitalStorageTweaks.zip
-FILES = ./ContentLib ./DigitalStorageTweaks.uplugin
-
-all:
-	mkdir -p Windows WindowsServer LinuxServer
-	cp -r $(FILES) Windows/
-	cp -r $(FILES) WindowsServer/
-	cp -r $(FILES) LinuxServer/
-	7z a -r $(ZIP_NAME) Windows/ LinuxServer/ WindowsServer/
```
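The deleted makefile duplicated the copy-and-zip steps that `Package` in the magefile now performs, but shelled out to `7z` and did no validation or line-ending conversion. Assuming the standard mage CLI, running `mage` with no arguments invokes the `Default` target (`Build`), which chains `Validate` and `Package` through `mg.SerialDeps`, so the zip is only produced from files that passed the schema and encoding checks.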