Created
July 7, 2024 17:50
-
-
Save daemonp/edc39c5df25ad9408103d5f071023aa7 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package main | |
import ( | |
"bytes" | |
"encoding/binary" | |
"encoding/json" | |
"flag" | |
"fmt" | |
"io/ioutil" | |
"os" | |
"path/filepath" | |
"regexp" | |
"strconv" | |
"strings" | |
) | |
const (
	// NEXE_SENTINEL is the 16-byte marker nexe places near the end of a
	// compiled binary; its presence identifies the nexe format (see checkNexe).
	NEXE_SENTINEL = "<nexe~~sentinel>"
	// PKG_PRELUDE_PATTERN is the bootstrap path embedded in binaries produced
	// by pkg; compiled as a regexp in checkPkg to detect the pkg format.
	PKG_PRELUDE_PATTERN = `pkg/prelude/bootstrap.js`
)
// Nexe holds the pieces extracted from a nexe-compiled binary.
type Nexe struct {
	entrypoint string // entry script name parsed from the embedded bootstrap code
	node       []byte // the node runtime portion preceding the appended payload
	code       []byte // nexe bootstrap/startup JavaScript
	bundle     []byte // concatenated resource blob addressed by resources
	resources  map[string][]int // path -> [offset, length] into bundle
	files      map[string][]byte // path -> extracted file contents
}
// Pkg holds the pieces extracted from a pkg-compiled binary.
type Pkg struct {
	virtfs     map[string]map[string][]int // snapshot path -> entry type ("0"/"1") -> [offset, length] into the payload
	entrypoint string // entry script, taken verbatim from the prelude
	symlinks   map[string]string // symlink table embedded in the prelude
	dict       map[string]interface{} // auxiliary dictionary from the prelude (schema not visible here)
	docompress string // compression flag from the prelude, kept as the raw matched digits
	files      map[string][]byte // normalized path -> extracted file contents
}
// slice splits data at index i, returning the leading i bytes and the
// remaining tail. Both results alias the original backing array.
func slice(data []byte, i int) ([]byte, []byte) {
	head := data[:i]
	tail := data[i:]
	return head, tail
}
func checkNexe(data []byte) bool { | |
return bytes.Equal(data[len(data)-32:len(data)-16], []byte(NEXE_SENTINEL)) | |
} | |
func decompileNexe(data []byte) (*Nexe, error) { | |
nexe := &Nexe{} | |
entryRegex := regexp.MustCompile(`const entry = path\.resolve\(path\.dirname\(process\.execPath\),\"(\S*.js)\"\)`) | |
entryMatches := entryRegex.FindSubmatch(data) | |
if len(entryMatches) < 2 { | |
return nil, fmt.Errorf("entry point not found") | |
} | |
nexe.entrypoint = string(entryMatches[1]) | |
codeSize, bundleSize := 0, 0 | |
if err := binary.Read(bytes.NewReader(data[len(data)-16:]), binary.LittleEndian, &codeSize); err != nil { | |
return nil, fmt.Errorf("failed to read code size: %v", err) | |
} | |
if err := binary.Read(bytes.NewReader(data[len(data)-8:]), binary.LittleEndian, &bundleSize); err != nil { | |
return nil, fmt.Errorf("failed to read bundle size: %v", err) | |
} | |
start := len(data) - codeSize - bundleSize - len(NEXE_SENTINEL) - 16 | |
nexe.node, data = slice(data, start) | |
nexe.code, data = slice(data, codeSize) | |
nexe.bundle, data = slice(data, bundleSize) | |
resourceRegex := regexp.MustCompile(`process\.__nexe = (.*);\n`) | |
resourceMatches := resourceRegex.FindSubmatch(nexe.code) | |
if len(resourceMatches) < 2 { | |
return nil, fmt.Errorf("resources not found") | |
} | |
var resources map[string][]int | |
if err := json.Unmarshal(resourceMatches[1], &resources); err != nil { | |
return nil, fmt.Errorf("failed to unmarshal resources: %v", err) | |
} | |
nexe.resources = resources | |
nexe.files = make(map[string][]byte) | |
for path, posLength := range nexe.resources { | |
pos, length := posLength[0], posLength[1] | |
nexe.files[path] = nexe.bundle[pos : pos+length] | |
} | |
return nexe, nil | |
} | |
func checkPkg(data []byte) bool { | |
pkgPreludeRegex := regexp.MustCompile(PKG_PRELUDE_PATTERN) | |
return pkgPreludeRegex.Match(data) | |
} | |
func decompilePkg(data []byte) (*Pkg, error) { | |
pkg := &Pkg{} | |
payloadPosRegex := regexp.MustCompile(`var PAYLOAD_POSITION = '(\d*).*'`) | |
payloadLenRegex := regexp.MustCompile(`var PAYLOAD_SIZE = '(\d*).*'`) | |
preludePosRegex := regexp.MustCompile(`var PRELUDE_POSITION = '(\d*).*'`) | |
preludeLenRegex := regexp.MustCompile(`var PRELUDE_SIZE = '(\d*).*'`) | |
payloadPosMatch := payloadPosRegex.FindSubmatch(data) | |
payloadLenMatch := payloadLenRegex.FindSubmatch(data) | |
preludePosMatch := preludePosRegex.FindSubmatch(data) | |
preludeLenMatch := preludeLenRegex.FindSubmatch(data) | |
if len(payloadPosMatch) < 2 || len(payloadLenMatch) < 2 || len(preludePosMatch) < 2 || len(preludeLenMatch) < 2 { | |
return nil, fmt.Errorf("positions and sizes not found") | |
} | |
payloadPos := atoi(string(payloadPosMatch[1])) | |
payloadLen := atoi(string(payloadLenMatch[1])) | |
preludePos := atoi(string(preludePosMatch[1])) | |
preludeLen := atoi(string(preludeLenMatch[1])) | |
payload := data[payloadPos : payloadPos+payloadLen] | |
prelude := data[preludePos : preludePos+preludeLen] | |
preludeRegex := regexp.MustCompile(`\/\/# sourceMappingURL=common\.js\.map\n\},\n(?P<virtfs>\{.*\})\n,\n(?P<entrypoint>.*)\n,\n(?P<symlinks>\{.*\})\n,\n(?P<_dict>\{.*\})\n,\n(?P<docompress>\d*)\n\);`) | |
preludeMatches := preludeRegex.FindSubmatch(prelude) | |
if len(preludeMatches) < 6 { | |
return nil, fmt.Errorf("prelude data not found") | |
} | |
virtfs := preludeMatches[1] | |
entrypoint := preludeMatches[2] | |
symlinks := preludeMatches[3] | |
_dict := preludeMatches[4] | |
docompress := preludeMatches[5] | |
if err := json.Unmarshal(virtfs, &pkg.virtfs); err != nil { | |
return nil, fmt.Errorf("failed to unmarshal virtfs: %v", err) | |
} | |
pkg.entrypoint = string(entrypoint) | |
if err := json.Unmarshal(symlinks, &pkg.symlinks); err != nil { | |
return nil, fmt.Errorf("failed to unmarshal symlinks: %v", err) | |
} | |
if err := json.Unmarshal(_dict, &pkg.dict); err != nil { | |
return nil, fmt.Errorf("failed to unmarshal dict: %v", err) | |
} | |
pkg.docompress = string(docompress) | |
pkg.files = make(map[string][]byte) | |
for path, slices := range pkg.virtfs { | |
for typ, posLength := range slices { | |
pos, length := posLength[0], posLength[1] | |
path = strings.Replace(path, "C:\\snapshot\\", "", 1) | |
path = strings.Replace(path, "/snapshot/", "", 1) | |
dataSlice := payload[pos : pos+length] | |
if typ == "0" { // BLOB | |
path = strings.Replace(path, ".js", ".jsc", 1) | |
pkg.files[path] = dataSlice | |
} else if typ == "1" { // CONTENT | |
pkg.files[path] = dataSlice | |
} | |
} | |
} | |
return pkg, nil | |
} | |
// atoi converts s to an int, panicking on malformed input. Callers only feed
// it digit strings captured by \d* regexp groups.
func atoi(s string) int {
	parsed, err := strconv.Atoi(s)
	if err == nil {
		return parsed
	}
	panic(err)
}
func main() { | |
outputDir := flag.String("output", ".", "Output base directory") | |
flag.Parse() | |
if flag.NArg() != 1 { | |
fmt.Println("Usage: decompiler <path to binary> [-output <output directory>]") | |
os.Exit(1) | |
} | |
path := flag.Arg(0) | |
data, err := ioutil.ReadFile(path) | |
if err != nil { | |
fmt.Printf("Failed to read file: %v\n", err) | |
os.Exit(1) | |
} | |
var files map[string][]byte | |
var entrypoint string | |
if checkNexe(data) { | |
nexe, err := decompileNexe(data) | |
if err != nil { | |
fmt.Printf("Failed to decompile nexe: %v\n", err) | |
os.Exit(1) | |
} | |
files = nexe.files | |
entrypoint = nexe.entrypoint | |
} else if checkPkg(data) { | |
pkg, err := decompilePkg(data) | |
if err != nil { | |
fmt.Printf("Failed to decompile pkg: %v\n", err) | |
os.Exit(1) | |
} | |
files = pkg.files | |
entrypoint = pkg.entrypoint | |
} else { | |
fmt.Println("Unknown binary format") | |
os.Exit(1) | |
} | |
fmt.Printf("Entrypoint: %s\n", entrypoint) | |
for path, content := range files { | |
fullPath := filepath.Join(*outputDir, path) | |
dir := filepath.Dir(fullPath) | |
if err := os.MkdirAll(dir, 0755); err != nil { | |
fmt.Printf("Failed to create directory %s: %v\n", dir, err) | |
os.Exit(1) | |
} | |
err := ioutil.WriteFile(fullPath, content, 0644) | |
if err != nil { | |
fmt.Printf("Failed to write file %s: %v\n", fullPath, err) | |
os.Exit(1) | |
} | |
fmt.Printf("Extracted file: %s\n", fullPath) | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment