Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
33b70ee
fix: moving unpacked directory flags to be post-processing of symlinks
mmaietta Dec 4, 2025
507976d
update changeset
mmaietta Dec 4, 2025
ad30221
Merge branch 'master' into fix/asar-unpacked
mmaietta Dec 4, 2025
fc3456a
revert some things
mmaietta Dec 5, 2025
3f0a68f
fix: validate package version when traversing directories in search o…
mmaietta Dec 5, 2025
4347729
tmp save. yarn 4 seems to work
mmaietta Dec 5, 2025
60aea42
fancy cache refactor
mmaietta Dec 6, 2025
72c4050
fix semver check
mmaietta Dec 6, 2025
e2ef9ad
cleanup
mmaietta Dec 6, 2025
0c697bd
reset snapshots
mmaietta Dec 6, 2025
99476eb
reset to 26.2.0
mmaietta Dec 6, 2025
feec823
cleanup and use more cache stuff
mmaietta Dec 6, 2025
b034d8b
cleaning up
mmaietta Dec 6, 2025
c1dfc94
mac snapshot update
mmaietta Dec 6, 2025
58d60a4
MOAR COVERAGE is good
mmaietta Dec 6, 2025
acc4eaf
Merge commit 'e043df57604fb029fd8f9cf7d835b8366ee71aa6' into fix/node…
mmaietta Dec 6, 2025
87c5c4b
cleanup
mmaietta Dec 6, 2025
1e48a70
prettier
mmaietta Dec 6, 2025
91e8363
Merge branch 'master' into fix/node-collector3
mmaietta Dec 6, 2025
32d9a0d
Merge remote-tracking branch 'origin/master' into fix/asar-unpacked
mmaietta Dec 6, 2025
c115057
Merge commit '91e83633eb1ec2796fd870b84b845f0bbdba3f53' into fix/asar…
mmaietta Dec 6, 2025
08bde8a
tmp save
mmaietta Dec 7, 2025
377b3ee
little refactor of the file to streamline/short-circuit logic paths
mmaietta Dec 7, 2025
1ae59a7
Merge remote-tracking branch 'origin/master' into fix/asar-unpacked
mmaietta Dec 7, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/light-flies-count.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"app-builder-lib": patch
---

fix: do not process unpacked parent directories; let them be parsed automatically, in sequence, through electron/asar streaming
201 changes: 116 additions & 85 deletions packages/app-builder-lib/src/asar/asarUtil.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { createPackageFromStreams, AsarStreamType, AsarDirectory } from "@electron/asar"
import { log } from "builder-util"
import { exists, Filter } from "builder-util/out/fs"
import { isEmptyOrSpaces, log } from "builder-util"
import { exists, Filter, FilterStats } from "builder-util/out/fs"
import * as fs from "fs-extra"
import { readlink } from "fs-extra"
import * as path from "path"
Expand Down Expand Up @@ -91,26 +91,39 @@ export class AsarPackager {
}
}

const results: AsarStreamType[] = []
const resultsPaths = new Set<string>()
const resultsMap = new Map<string, AsarStreamType>()
const streamOrdering: string[] = []
const normalizedUnpackedPaths = Array.from(unpackedPaths).map(p => path.normalize(p))

// Optimized unpacked check with short-circuit evaluation
const isUnpacked = (dir: string, file?: string, stat?: FilterStats): boolean => {
const normalizedDir = path.normalize(dir)

// Check file pattern first (most specific)
if (!isEmptyOrSpaces(file) && stat && this.config.unpackPattern?.(file, stat)) {
return true
}

// Check if path is within any unpacked directory
for (const unpackedPath of normalizedUnpackedPaths) {
if (normalizedDir === unpackedPath || normalizedDir.startsWith(unpackedPath + path.sep)) {
return true
}
}

return false
}

// First pass: process all files in order, ensuring parent directories exist
for (const fileSet of fileSets) {
// Don't use Promise.all, we need to retain order of execution/iteration through the already-ordered fileset
for (const [index, file] of fileSet.files.entries()) {
const transformedData = fileSet.transformedFiles?.get(index)
const stat = fileSet.metadata.get(file)!
const destination = path.relative(this.config.defaultDestination, getDestinationPath(file, fileSet))

const paths = Array.from(unpackedPaths).map(p => path.normalize(p))

const isChildDirectory = (fileOrDirPath: string) =>
paths.includes(path.normalize(fileOrDirPath)) || paths.some(unpackedPath => path.normalize(fileOrDirPath).startsWith(unpackedPath + path.sep))
const isUnpacked = (dir: string) => {
const isChild = isChildDirectory(dir)
const isFileUnpacked = this.config.unpackPattern?.(file, stat) ?? false
return isChild || isFileUnpacked
}

this.processParentDirectories(isUnpacked, destination, results, resultsPaths)
// Ensure parent directories exist before processing file
this.ensureParentDirectories(destination, resultsMap, streamOrdering)

const result = await this.processFileOrSymlink({
file,
Expand All @@ -120,31 +133,58 @@ export class AsarPackager {
stat,
isUnpacked,
})
if (result != null) {
results.push(result)
resultsPaths.add(result.path)

if (result && !resultsMap.has(result.path)) {
resultsMap.set(result.path, result)
streamOrdering.push(result.path)
}
}
}
return results
}

private processParentDirectories(isUnpacked: (path: string) => boolean, destination: string, results: AsarStreamType[], resultsPaths: Set<string>) {
// process parent directories
let superDir = path.dirname(path.normalize(destination))
while (superDir !== ".") {
const dir: AsarDirectory = {
type: "directory",
path: superDir,
unpacked: isUnpacked(superDir),
// Second pass: propagate unpacked flag to parent directories
for (const entry of resultsMap.values()) {
if (entry.unpacked) {
this.markParentDirectoriesAsUnpacked(entry.path, resultsMap, isUnpacked)
}
// add to results if not already present
if (!resultsPaths.has(dir.path)) {
results.push(dir)
resultsPaths.add(dir.path)
}

// Build final results array maintaining processing order
return streamOrdering.map(path => resultsMap.get(path)!).filter(Boolean)
}

/**
 * Registers every ancestor directory of `destination` in `resultsMap` as an
 * `AsarDirectory` entry, so that a directory entry is always emitted before
 * any file contained in it.
 *
 * Entries are inserted root-first and always with `unpacked: false`; the
 * `unpacked` flag is expected to be flipped by a later pass over the results
 * (NOTE(review): confirm against the caller's "second pass" logic).
 *
 * @param destination   relative destination path of the file being processed
 * @param resultsMap    map of already-registered asar entries, keyed by path
 * @param streamOrdering insertion-ordered list of entry paths; new parent
 *                       directories are appended here to preserve ordering
 */
private ensureParentDirectories(destination: string, resultsMap: Map<string, AsarStreamType>, streamOrdering: string[]): void {
const parents: string[] = []
let current = path.dirname(path.normalize(destination))

// Collect all parent directories from deepest to root
// (unshift reverses the walk so `parents` ends up ordered root -> deepest)
while (current !== ".") {
parents.unshift(current)
current = path.dirname(current)
}

// Add parent directories in order (root to deepest)
// Skip any directory already registered so earlier entries keep their
// position (and any already-set flags) in the stream ordering.
for (const parentPath of parents) {
if (!resultsMap.has(parentPath)) {
const dir: AsarDirectory = {
type: "directory",
path: parentPath,
unpacked: false, // Updated in second pass if needed
}
resultsMap.set(parentPath, dir)
streamOrdering.push(parentPath)
}
}
}

private markParentDirectoriesAsUnpacked(destination: string, resultsMap: Map<string, AsarStreamType>, isUnpacked: (path: string) => boolean): void {
let current = path.dirname(path.normalize(destination))

superDir = path.dirname(superDir)
while (current !== ".") {
const entry = resultsMap.get(current)
if (entry && isUnpacked(current)) {
entry.unpacked = true
}
current = path.dirname(current)
}
}

Expand All @@ -154,54 +194,57 @@ export class AsarPackager {
stat: fs.Stats
fileSet: ResolvedFileSet
transformedData: string | Buffer | undefined
isUnpacked: (path: string) => boolean
isUnpacked: (dir: string, file?: string, stat?: FilterStats) => boolean
}): Promise<AsarStreamType> {
const { isUnpacked, transformedData, file, destination, stat } = options
const unpacked = isUnpacked(destination)
const unpacked = isUnpacked(destination, file, stat)

// Handle directories
if (!stat.isFile() && !stat.isSymbolicLink()) {
return { path: destination, unpacked, type: "directory" }
}

// write any data if provided, skip symlink check
// Handle transformed data (pre-processed content)
if (transformedData != null) {
const streamGenerator = () => {
return new Readable({
read() {
this.push(transformedData)
this.push(null)
},
})
}
const size = Buffer.byteLength(transformedData)
return { path: destination, streamGenerator, unpacked, type: "file", stat: { mode: stat.mode, size } }
return {
path: destination,
streamGenerator: () =>
new Readable({
read() {
this.push(transformedData)
this.push(null)
},
}),
unpacked,
type: "file",
stat: { mode: stat.mode, size },
}
}

// verify that the file is not a direct link or symlinked to access/copy a system file
await this.protectSystemAndUnsafePaths(file, await this.packager.info.getWorkspaceRoot())

const config = {
const baseConfig = {
path: destination,
streamGenerator: () => fs.createReadStream(file),
unpacked,
stat,
}

// file, stream directly
// Handle regular files
if (!stat.isSymbolicLink()) {
return {
...config,
type: "file",
}
return { ...baseConfig, type: "file" }
}

// okay, it must be a symlink. evaluate link to be relative to source file in asar
// Handle symlinks - make relative to source location
let link = await readlink(file)
if (path.isAbsolute(link)) {
link = path.relative(path.dirname(file), link)
}

return {
...config,
...baseConfig,
type: "link",
symlink: link,
}
Expand Down Expand Up @@ -241,31 +284,27 @@ export class AsarPackager {
for (const [oldIndex, value] of fileSet.transformedFiles) {
const newIndex = indexMap.get(oldIndex)
if (newIndex === undefined) {
const file = fileSet.files[oldIndex]
throw new Error(`Internal error: ${file} was lost while ordering asar`)
throw new Error(`Internal error: ${fileSet.files[oldIndex]} was lost while ordering asar`)
}

transformedFiles.set(newIndex, value)
}
}

const { src, destination, metadata } = fileSet

return {
src,
destination,
metadata,
src: fileSet.src,
destination: fileSet.destination,
metadata: fileSet.metadata,
files: sortedFileEntries.map(([, file]) => file),
transformedFiles,
}
}

private async checkAgainstRoots(target: string, allowRoots: string[]): Promise<boolean> {
const resolved = await resolvePath(target)
if (!resolved) return false

for (const root of allowRoots) {
const resolvedRoot = root
if (resolved === resolvedRoot || resolved?.startsWith(resolvedRoot + path.sep)) {
if (resolved === root || resolved.startsWith(root + path.sep)) {
return true
}
}
Expand All @@ -276,33 +315,25 @@ export class AsarPackager {
const resolved = await resolvePath(file)
const logFields = { source: file, realPath: resolved }

const isUnsafe = async () => {
const workspace = await resolvePath(workspaceRoot)

if (workspace && resolved?.startsWith(workspace)) {
// if in workspace, always safe
return false
}

const allowed = await this.checkAgainstRoots(file, await ALLOWLIST)
if (allowed) {
return false // allowlist is priority
}
const workspace = await resolvePath(workspaceRoot)

const denied = await this.checkAgainstRoots(file, await DENYLIST)
if (denied) {
log.error(logFields, `denied access to system or unsafe path`)
return true
}
// default
log.debug(logFields, `path is outside of explicit safe paths, defaulting to safe`)
return false
// If in workspace, always safe
if (workspace && resolved?.startsWith(workspace)) {
return
}

const unsafe = await isUnsafe()
// Check allowlist (priority)
if (await this.checkAgainstRoots(file, await ALLOWLIST)) {
return
}

if (unsafe) {
// Check denylist
if (await this.checkAgainstRoots(file, await DENYLIST)) {
log.error(logFields, `denied access to system or unsafe path`)
throw new Error(`Cannot copy file [${file}] symlinked to file [${resolved}] outside the package to a system or unsafe path`)
}

// Default: outside explicit paths but not explicitly denied
log.debug(logFields, `path is outside of explicit safe paths, defaulting to safe`)
}
}
1 change: 0 additions & 1 deletion test/snapshots/HoistedNodeModuleTest.js.snap
Original file line number Diff line number Diff line change
Expand Up @@ -125022,7 +125022,6 @@ exports[`yarn several workspaces and asarUnpack 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
},
},
Expand Down
8 changes: 0 additions & 8 deletions test/snapshots/globTest.js.snap
Original file line number Diff line number Diff line change
Expand Up @@ -61,10 +61,8 @@ exports[`asarUnpack node_modules 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
},
"unpacked": true,
}
`;

Expand Down Expand Up @@ -28941,10 +28939,8 @@ exports[`unpackDir 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
},
"unpacked": true,
},
"b2": {
"files": {
Expand All @@ -28953,7 +28949,6 @@ exports[`unpackDir 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
"do-not-unpack-dir": {
"files": {
Expand Down Expand Up @@ -29060,10 +29055,8 @@ exports[`unpackDir one 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
},
"unpacked": true,
},
"b2": {
"files": {
Expand All @@ -29072,7 +29065,6 @@ exports[`unpackDir one 2`] = `
"unpacked": true,
},
},
"unpacked": true,
},
"do-not-unpack-dir": {
"files": {
Expand Down
3 changes: 2 additions & 1 deletion test/src/updater/blackboxUpdateTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,8 @@ async function runTest(context: TestContext, target: string, packageManager: str
// Move app update to the root directory of the server
await fs.copy(newAppDir.dir, rootDirectory, { recursive: true, overwrite: true })

const verifyAppVersion = async (expectedVersion: string) => await launchAndWaitForQuit({ appPath, timeoutMs: 2 * 60 * 1000, updateConfigPath, expectedVersion, packageManagerToTest: packageManager })
const verifyAppVersion = async (expectedVersion: string) =>
await launchAndWaitForQuit({ appPath, timeoutMs: 2 * 60 * 1000, updateConfigPath, expectedVersion, packageManagerToTest: packageManager })

const result = await verifyAppVersion(OLD_VERSION_NUMBER)
log.debug(result, "Test App version")
Expand Down