mirror of
https://github.com/aaddrick/claude-desktop-debian.git
synced 2026-05-17 00:26:21 +03:00
refactor: split build.sh into topical modules under scripts/
Splits the 2124-line build.sh into a 318-line orchestrator plus
16 topical modules, grouped so CODEOWNERS can assign per-subsystem
reviewers:
scripts/_common.sh shared shell utilities
scripts/setup/ host detection, deps, download
scripts/patches/ regex patches on minified JS
_common.sh extract_electron_variable etc.
app-asar.sh wrapper injection
titlebar.sh
tray.sh menu handler + icon selection
quick-window.sh
claude-code.sh
cowork.sh cowork linux patching (largest)
scripts/staging/ post-patch file staging
build.sh now sources each module in dependency order and retains
only run_packaging, cleanup_build, print_next_steps, and main.
All globals stay at the top of build.sh and are read by sourced
modules; each module's header documents which globals it reads and
mutates (implicit-contract documentation).
This is a pure-move refactor. Function bodies were copied verbatim
— verified by byte-identical diff of the function set vs the
pre-split build.sh (34 functions, all present with identical bodies).
Note: .github/workflows/shellcheck.yml may benefit from a '-x' flag
so shellcheck follows the new '# shellcheck source=' directives, but
that CI tweak is left as a separate concern.
Co-Authored-By: Claude <claude@anthropic.com>
This commit is contained in:
50
scripts/_common.sh
Normal file
50
scripts/_common.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#===============================================================================
|
||||
# Common shell utilities: logging, command checks, checksum verification.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: (none)
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
# Report whether a command is available on PATH.
# $1 - command name. Prints "<name> found" / "<name> not found" to stdout.
# Returns 0 when the command exists, 1 otherwise.
check_command() {
    if command -v "$1" &> /dev/null; then
        echo "$1 found"
        return 0
    fi
    echo "$1 not found"
    return 1
}
|
||||
|
||||
# Print a cyan banner marking the start of a build phase named $1.
section_header() {
    local cyan=$'\033[1;36m' reset=$'\033[0m'
    echo -e "${cyan}--- $1 ---${reset}"
}
|
||||
|
||||
# Print a cyan banner marking the end of a build phase named $1.
section_footer() {
    local cyan=$'\033[1;36m' reset=$'\033[0m'
    echo -e "${cyan}--- End $1 ---${reset}"
}
|
||||
|
||||
# Verify a file's SHA-256 checksum against an expected digest.
# $1 - file path
# $2 - expected hex digest; empty means "no hash known", verification skipped
# $3 - human-readable label for messages (default: "file")
# Returns 0 on match or skip, 1 on mismatch (details on stderr).
verify_sha256() {
    local file_path=$1 expected_hash=$2 label=${3:-file}

    if [[ -z $expected_hash ]]; then
        echo "Warning: No SHA-256 hash for ${label}," \
            'skipping verification' >&2
        return 0
    fi

    echo "Verifying SHA-256 checksum for ${label}..."
    # sha256sum prints "<hash>  <path>"; keep only the first field.
    local actual_hash rest
    read -r actual_hash rest < <(sha256sum "$file_path")

    if [[ $actual_hash == "$expected_hash" ]]; then
        echo "SHA-256 verified: ${label}"
        return 0
    fi

    echo "SHA-256 mismatch for ${label}!" >&2
    echo "  Expected: $expected_hash" >&2
    echo "  Actual: $actual_hash" >&2
    return 1
}
|
||||
56
scripts/patches/_common.sh
Normal file
56
scripts/patches/_common.sh
Normal file
@@ -0,0 +1,56 @@
|
||||
#===============================================================================
|
||||
# Shared patching helpers: dynamic extraction of minified variable names
|
||||
# and fix-ups that multiple tray/quick-window patches rely on.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: project_root
|
||||
# Modifies globals: electron_var, electron_var_re
|
||||
#===============================================================================
|
||||
|
||||
# Discover the minified variable name bound to require("electron") in the
# extracted app's index.js (names change across releases, so they must be
# extracted dynamically, never hard-coded).
# Reads global: project_root (only on the failure path, to restore cwd).
# Sets globals: electron_var (raw name), electron_var_re (regex-safe form).
# Exits the whole script if no candidate is found.
extract_electron_variable() {
    echo 'Extracting electron module variable name...'
    local index_js='app.asar.contents/.vite/build/index.js'

    # Primary anchor: the `X = require("electron")` assignment.
    electron_var=$(grep -oP '\$?\w+(?=\s*=\s*require\("electron"\))' \
        "$index_js" | head -1)
    # Fallback anchor: the `new X.Tray(...)` construction site.
    [[ -n $electron_var ]] || electron_var=$(grep -oP '(?<=new )\$?\w+(?=\.Tray\b)' \
        "$index_js" | head -1)

    if [[ -z $electron_var ]]; then
        echo 'Failed to extract electron variable name' >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    # Escape any literal '$' so the name can be embedded in later regexes.
    electron_var_re="${electron_var//\$/\\$}"
    echo "  Found electron variable: $electron_var"
    echo '##############################################################'
}
|
||||
|
||||
# Rewrite `X.nativeTheme` references that use a stale minified alias so
# every reference goes through the variable actually bound to electron.
# Reads globals: electron_var, electron_var_re (set by
# extract_electron_variable).
fix_native_theme_references() {
    echo 'Fixing incorrect nativeTheme variable references...'
    local index_js='app.asar.contents/.vite/build/index.js'

    # Collect each distinct identifier appearing before `.nativeTheme`,
    # minus the correct one. `|| true` keeps an empty result from failing.
    local bad_refs
    mapfile -t bad_refs < <(
        grep -oP '\$?\w+(?=\.nativeTheme)' "$index_js" \
            | sort -u \
            | grep -Fxv "$electron_var" || true
    )

    if (( ${#bad_refs[@]} == 0 )); then
        echo '  All nativeTheme references are correct'
        echo '##############################################################'
        return
    fi

    local bad bad_re
    for bad in "${bad_refs[@]}"; do
        echo "  Replacing: $bad.nativeTheme -> $electron_var.nativeTheme"
        # Escape '$' in the minified name before using it as a sed pattern.
        bad_re="${bad//\$/\\$}"
        sed -i -E \
            "s/${bad_re}\.nativeTheme/${electron_var_re}.nativeTheme/g" \
            "$index_js"
    done
    echo '##############################################################'
}
|
||||
97
scripts/patches/app-asar.sh
Normal file
97
scripts/patches/app-asar.sh
Normal file
@@ -0,0 +1,97 @@
|
||||
#===============================================================================
|
||||
# Top-level app.asar patch orchestration: extract, wrap entry point, stub
|
||||
# native module, copy i18n and tray icons, then invoke per-feature patches.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# claude_extract_dir, app_staging_dir, asar_exec, source_dir
|
||||
# Modifies globals: (none directly — delegated patches may mutate electron_var)
|
||||
#===============================================================================
|
||||
|
||||
# Extract app.asar, install the frame-fix wrapper as the new entry point,
# stub the native module, stage i18n/tray resources, then run each
# per-feature patch in dependency order, and finally ship the cowork VM
# service daemon script.
# Reads globals: claude_extract_dir, app_staging_dir, asar_exec, source_dir.
# Leaves the shell cd'd into $app_staging_dir.
patch_app_asar() {
    echo 'Processing app.asar...'
    cp "$claude_extract_dir/lib/net45/resources/app.asar" "$app_staging_dir/" || exit 1
    cp -a "$claude_extract_dir/lib/net45/resources/app.asar.unpacked" "$app_staging_dir/" || exit 1
    cd "$app_staging_dir" || exit 1
    "$asar_exec" extract app.asar app.asar.contents || exit 1

    # Wrap the original entry point so frame-fix-wrapper.js loads first.
    echo 'Creating BrowserWindow frame fix wrapper...'
    local main_entry
    main_entry=$(node -e "const pkg = require('./app.asar.contents/package.json'); console.log(pkg.main);")
    echo "Original main entry: $main_entry"

    cp "$source_dir/scripts/frame-fix-wrapper.js" app.asar.contents/frame-fix-wrapper.js || exit 1

    cat > app.asar.contents/frame-fix-entry.js << EOFENTRY
// Load frame fix first
require('./frame-fix-wrapper.js');
// Then load original main
require('./${main_entry}');
EOFENTRY

    # BrowserWindow frame/titleBarStyle patching is handled at runtime by
    # frame-fix-wrapper.js via a Proxy on require('electron'). No sed patches
    # needed — the wrapper detects popup vs main windows by their options and
    # applies frame:true/false accordingly.

    # Point package.json at the wrapper entry and declare node-pty as an
    # optional dependency (terminal support).
    echo 'Modifying package.json to load frame fix and add node-pty...'
    node -e "
const fs = require('fs');
const pkg = require('./app.asar.contents/package.json');
pkg.originalMain = pkg.main;
pkg.main = 'frame-fix-entry.js';
pkg.optionalDependencies = pkg.optionalDependencies || {};
pkg.optionalDependencies['node-pty'] = '^1.0.0';
fs.writeFileSync('./app.asar.contents/package.json', JSON.stringify(pkg, null, 2));
console.log('Updated package.json: main entry and node-pty dependency');
"

    # Replace the Windows-only native module with a JS stub.
    echo 'Creating stub native module...'
    mkdir -p app.asar.contents/node_modules/@ant/claude-native || exit 1
    cp "$source_dir/scripts/claude-native-stub.js" \
        app.asar.contents/node_modules/@ant/claude-native/index.js || exit 1

    mkdir -p app.asar.contents/resources/i18n || exit 1
    cp "$claude_extract_dir/lib/net45/resources/"*-*.json app.asar.contents/resources/i18n/ || exit 1

    # Copy tray icons into asar so both packaged (process.resourcesPath)
    # and unpackaged (app.getAppPath()) code paths can find them.
    cp "$claude_extract_dir/lib/net45/resources/Tray"* app.asar.contents/resources/ 2>/dev/null || \
        echo 'Warning: No tray icon files found for asar inclusion'

    # Per-feature patches, in dependency order: extract_electron_variable
    # must run before the nativeTheme/tray patches that consume electron_var.
    local patch_step
    for patch_step in \
        patch_titlebar_detection \
        extract_electron_variable \
        fix_native_theme_references \
        patch_tray_menu_handler \
        patch_tray_icon_selection \
        patch_menu_bar_default \
        patch_quick_window \
        patch_linux_claude_code \
        patch_cowork_linux; do
        "$patch_step"
    done

    # Ship the cowork VM service daemon script alongside the patched app so
    # the Linux auto-launch patch can fork it.
    echo 'Installing cowork VM service daemon...'
    cp "$source_dir/scripts/cowork-vm-service.js" \
        app.asar.contents/cowork-vm-service.js || exit 1
    echo 'Cowork VM service daemon installed'
}
|
||||
29
scripts/patches/claude-code.sh
Normal file
29
scripts/patches/claude-code.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
#===============================================================================
|
||||
# Linux support in Claude Code's getHostPlatform: route linux-* bundles
|
||||
# through the normal platform switch instead of throwing.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: (none)
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
# Add Linux to Claude Code's getHostPlatform so linux-x64/linux-arm64 bundles
# are routed through the normal platform switch instead of throwing.
# Idempotent: a second run detects the injected branch and does nothing.
patch_linux_claude_code() {
    local index_js='app.asar.contents/.vite/build/index.js'

    # Idempotence guard: bail out if a linux branch already exists.
    if grep -q 'process.platform==="linux".*linux-arm64.*linux-x64' "$index_js"; then
        echo 'Linux claude code binary support already present'
        return
    fi

    # New format (Claude >= 1.1.3541): getHostPlatform includes arch detection
    # for win32. Pattern:
    #   if(process.platform==="win32")return e==="arm64"?"win32-arm64":"win32-x64";throw new Error(...)
    if grep -qP 'if\(process\.platform==="win32"\)return \w+==="arm64"\?"win32-arm64":"win32-x64";throw' "$index_js"; then
        sed -i -E 's/if\(process\.platform==="win32"\)return (\w+)==="arm64"\?"win32-arm64":"win32-x64";throw/if(process.platform==="win32")return \1==="arm64"?"win32-arm64":"win32-x64";if(process.platform==="linux")return \1==="arm64"?"linux-arm64":"linux-x64";throw/' "$index_js"
        echo 'Added linux claude code support (new arch-aware format)'
        return
    fi

    # Old format (Claude <= 1.1.3363): win32 return without arch detection.
    if grep -q 'if(process.platform==="win32")return"win32-x64";' "$index_js"; then
        sed -i 's/if(process.platform==="win32")return"win32-x64";/if(process.platform==="win32")return"win32-x64";if(process.platform==="linux")return process.arch==="arm64"?"linux-arm64":"linux-x64";/' "$index_js"
        echo 'Added linux claude code support (legacy format)'
        return
    fi

    echo 'Warning: Could not find getHostPlatform pattern to patch for Linux claude code support'
}
|
||||
835
scripts/patches/cowork.sh
Normal file
835
scripts/patches/cowork.sh
Normal file
@@ -0,0 +1,835 @@
|
||||
#===============================================================================
|
||||
# Cowork-mode Linux patches (TypeScript VM client, Unix socket, daemon
|
||||
# auto-launch, smol-bin copy, sharedCwdPath forwarding, etc.) and node-pty
|
||||
# installation/staging for terminal support.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# node_pty_dir, work_dir, app_staging_dir
|
||||
# Modifies globals: node_pty_build_dir
|
||||
#===============================================================================
|
||||
|
||||
patch_cowork_linux() {
|
||||
echo 'Patching Cowork mode for Linux...'
|
||||
local index_js='app.asar.contents/.vite/build/index.js'
|
||||
|
||||
if ! grep -q 'vmClient (TypeScript)' "$index_js"; then
|
||||
echo ' Cowork mode code not found in this version, skipping'
|
||||
echo '##############################################################'
|
||||
return
|
||||
fi
|
||||
|
||||
# All complex patches are done via node to avoid shell escaping issues
|
||||
# with minified JavaScript. Uses unique string anchors and dynamic
|
||||
# variable extraction to be version-agnostic per CLAUDE.md guidelines.
|
||||
if ! INDEX_JS="$index_js" SVC_PATH="cowork-vm-service.js" \
|
||||
node << 'COWORK_PATCH'
|
||||
const fs = require('fs');
|
||||
const indexJs = process.env.INDEX_JS;
|
||||
let code = fs.readFileSync(indexJs, 'utf8');
|
||||
let patchCount = 0;
|
||||
|
||||
// Helper: extract a balanced block starting at a delimiter.
|
||||
// Returns the substring from open to close (inclusive), or null.
|
||||
// Works for {} [] () by specifying the open char.
|
||||
function extractBlock(str, startIdx, open = '{') {
|
||||
const close = { '{': '}', '[': ']', '(': ')' }[open];
|
||||
const blockStart = str.indexOf(open, startIdx);
|
||||
if (blockStart === -1) return null;
|
||||
let depth = 1;
|
||||
let pos = blockStart + 1;
|
||||
while (depth > 0 && pos < str.length) {
|
||||
if (str[pos] === open) depth++;
|
||||
else if (str[pos] === close) depth--;
|
||||
pos++;
|
||||
}
|
||||
return depth === 0 ? str.substring(blockStart, pos) : null;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 1: Platform check - allow Linux through fz()
|
||||
// Pattern: VAR!=="darwin"&&VAR!=="win32" (unique in platform gate)
|
||||
// Anchor: appears near 'unsupported_platform' code value
|
||||
// ============================================================
|
||||
const platformGateRe = /(\w+)(\s*!==\s*"darwin"\s*&&\s*)\1(\s*!==\s*"win32")/g;
|
||||
const origCode = code;
|
||||
code = code.replace(platformGateRe, (match, varName, mid, end) => {
|
||||
// Only patch the instance near the "unsupported_platform" code value
|
||||
const matchIdx = origCode.indexOf(match);
|
||||
const nearbyText = origCode.substring(matchIdx, matchIdx + 200);
|
||||
if (nearbyText.includes('unsupported_platform') || nearbyText.includes('Unsupported platform')) {
|
||||
return `${varName}${mid}${varName}${end}&&${varName}!=="linux"`;
|
||||
}
|
||||
return match;
|
||||
});
|
||||
if (code !== origCode) {
|
||||
console.log(' Patched platform check to allow Linux');
|
||||
patchCount++;
|
||||
} else {
|
||||
// Try without backreference (in case minifier uses different var names)
|
||||
const simpleRe = /(!=="darwin"\s*&&\s*\w+\s*!=="win32")([\s\S]{0,200}unsupported_platform)/;
|
||||
const simpleMatch = code.match(simpleRe);
|
||||
if (simpleMatch) {
|
||||
const varMatch = simpleMatch[0].match(/(\w+)\s*!==\s*"win32"/);
|
||||
if (varMatch) {
|
||||
code = code.replace(simpleMatch[1],
|
||||
simpleMatch[1] + '&&' + varMatch[1] + '!=="linux"');
|
||||
console.log(' Patched platform check to allow Linux (fallback)');
|
||||
patchCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (code === origCode) {
|
||||
console.error('FATAL: Failed to patch cowork platform gate for Linux.');
|
||||
console.error('The app will crash at startup without this patch.');
|
||||
console.error('The platform check pattern or nearby anchor text may have changed.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 2: Module loading - use TypeScript VM client on Linux
|
||||
// Anchor: unique string "vmClient (TypeScript)"
|
||||
// Extracts the win32 platform variable, adds Linux OR condition
|
||||
// ============================================================
|
||||
const vmClientLogMatch = code.match(/(\w+)(\s*\?\s*"vmClient \(TypeScript\)")/);
|
||||
if (vmClientLogMatch) {
|
||||
const win32Var = vmClientLogMatch[1];
|
||||
|
||||
// 2a: Patch the log/description line
|
||||
// FROM: WIN32VAR?"vmClient (TypeScript)"
|
||||
// TO: (WIN32VAR||process.platform==="linux")?"vmClient (TypeScript)"
|
||||
// Use negative lookbehind to avoid double-patching
|
||||
const logRe = new RegExp(
|
||||
'(?<!\\|\\|process\\.platform==="linux"\\))' +
|
||||
win32Var.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') +
|
||||
'(\\s*\\?\\s*"vmClient \\(TypeScript\\)")'
|
||||
);
|
||||
if (logRe.test(code)) {
|
||||
code = code.replace(logRe,
|
||||
'(' + win32Var + '||process.platform==="linux")$1');
|
||||
console.log(' Patched VM client log check for Linux');
|
||||
patchCount++;
|
||||
}
|
||||
|
||||
// 2b: Patch the actual module assignment
|
||||
// Beautified: WIN32VAR ? (df = { vm: bYe }) : (df = ...)
|
||||
// Minified: WIN32VAR?df={vm:bYe}:df=...
|
||||
// Handle both: outer parens are optional in minified code
|
||||
const assignRe = new RegExp(
|
||||
'(?<!\\|\\|process\\.platform==="linux"\\)?)' +
|
||||
win32Var.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') +
|
||||
'(\\s*\\?\\s*\\(?\\s*\\w+\\s*=\\s*\\{\\s*vm\\s*:\\s*\\w+\\s*\\}\\s*\\)?)'
|
||||
);
|
||||
if (assignRe.test(code)) {
|
||||
code = code.replace(assignRe,
|
||||
'(' + win32Var + '||process.platform==="linux")$1');
|
||||
console.log(' Patched VM module assignment for Linux');
|
||||
patchCount++;
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find vmClient variable for module loading patch');
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 3: Socket path - use Unix domain socket on Linux
|
||||
// Anchor: unique string "cowork-vm-service" in pipe path
|
||||
// ============================================================
|
||||
const pipeMatch = code.match(/(\w+)(\s*=\s*)"([^"]*\\\\[^"]*cowork-vm-service[^"]*)"/);
|
||||
if (pipeMatch) {
|
||||
const pipeVar = pipeMatch[1];
|
||||
const assign = pipeMatch[2];
|
||||
const pipeStr = pipeMatch[3];
|
||||
const oldExpr = pipeVar + assign + '"' + pipeStr + '"';
|
||||
const newExpr = pipeVar + assign +
|
||||
'process.platform==="linux"?' +
|
||||
'(process.env.XDG_RUNTIME_DIR||"/tmp")+"/cowork-vm-service.sock"' +
|
||||
':"' + pipeStr + '"';
|
||||
code = code.replace(oldExpr, newExpr);
|
||||
console.log(' Patched socket path for Linux Unix domain socket');
|
||||
patchCount++;
|
||||
} else {
|
||||
console.log(' WARNING: Could not find pipe path for socket patch');
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 4: Bundle manifest - add empty Linux entries to files
|
||||
// The linux key MUST exist to prevent TypeError when the app
|
||||
// accesses files["linux"]["x64"] during cowork status checks.
|
||||
// Empty arrays mean no VM files are downloaded — this is correct
|
||||
// because the VM backend is non-functional on Linux (bwrap is
|
||||
// the only working backend and doesn't use VM files).
|
||||
// Note: [].every() returns true (vacuous truth), so iBA() reports
|
||||
// that VM files are present. That makes the download() IPC
|
||||
// short-circuit without fetching anything, which is the intent
|
||||
// here. Patch 4b handles the downstream side-effect on
|
||||
// getDownloadStatus() so the Cowork tab doesn't auto-select on
|
||||
// every launch (#341).
|
||||
// ============================================================
|
||||
if (!code.includes('"linux":{') && !code.includes("'linux':{") &&
|
||||
!code.includes('linux:{')) {
|
||||
const shaRe = /sha\s*:\s*"([a-f0-9]{40})"/;
|
||||
const shaMatch = code.match(shaRe);
|
||||
if (shaMatch) {
|
||||
const shaIdx = code.indexOf(shaMatch[0]);
|
||||
const afterSha = code.indexOf('files', shaIdx);
|
||||
if (afterSha !== -1 && afterSha - shaIdx < 200) {
|
||||
const filesBlock = extractBlock(code, afterSha, '{');
|
||||
if (filesBlock) {
|
||||
const filesEnd = code.indexOf(filesBlock, afterSha)
|
||||
+ filesBlock.length;
|
||||
const insertPos = filesEnd - 1;
|
||||
const linuxEntry = ',linux:{x64:[],arm64:[]}';
|
||||
code = code.substring(0, insertPos) +
|
||||
linuxEntry + code.substring(insertPos);
|
||||
console.log(' Added empty Linux entries to' +
|
||||
' bundle manifest (VM download disabled)');
|
||||
patchCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!code.includes('linux:{x64:')) {
|
||||
console.log(' WARNING: Could not add Linux bundle' +
|
||||
' manifest entries');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 4b: Suppress Cowork tab auto-selection on launch (#341)
|
||||
// Anchor: getDownloadStatus() method with readable enum property
|
||||
// names (.Downloading, .Ready, .NotDownloaded) — stable
|
||||
// across minifier releases.
|
||||
//
|
||||
// Patch 4's vacuous-truth workaround makes iBA() report that VM
|
||||
// files are "ready", which is what short-circuits the download
|
||||
// path. The side-effect is that getDownloadStatus() also returns
|
||||
// Ready on every startup, and the remote web app treats a
|
||||
// startup observation of Ready as the "download just finished"
|
||||
// transition that auto-navigates to Cowork on macOS/Windows.
|
||||
// Linux users hit that transition on every launch.
|
||||
//
|
||||
// Fix: return NotDownloaded on Linux from getDownloadStatus().
|
||||
// iBA() is left alone so download() still short-circuits, and
|
||||
// clicking the Cowork tab still works (the web app's setup flow
|
||||
// calls download() which returns success immediately).
|
||||
// ============================================================
|
||||
{
|
||||
const statusRe = /getDownloadStatus\(\)\{return\s+(\w+\(\)\?(\w+)\.Downloading:\w+\(\)\?\2\.Ready:\2\.NotDownloaded)\}/;
|
||||
const statusMatch = code.match(statusRe);
|
||||
if (statusMatch) {
|
||||
const [whole, origExpr, enumVar] = statusMatch;
|
||||
const replacement =
|
||||
'getDownloadStatus(){return process.platform==="linux"?' +
|
||||
enumVar + '.NotDownloaded:' + origExpr + '}';
|
||||
code = code.replace(whole, replacement);
|
||||
console.log(' Patched getDownloadStatus to return ' +
|
||||
'NotDownloaded on Linux (suppresses auto-nav, #341)');
|
||||
patchCount++;
|
||||
} else if (code.includes(
|
||||
'getDownloadStatus(){return process.platform==="linux"?'
|
||||
)) {
|
||||
console.log(' Cowork auto-nav suppression already applied');
|
||||
} else {
|
||||
console.log(' WARNING: Could not find getDownloadStatus' +
|
||||
' pattern for auto-nav suppression (#341)');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 5: MSIX check bypass for Linux
|
||||
// The fz() function checks: if(t==="win32"&&!ga()) for MSIX
|
||||
// This is already gated to win32, so no change needed.
|
||||
// ============================================================
|
||||
|
||||
// ============================================================
|
||||
// Patch 6: Auto-launch service daemon on first connection attempt
|
||||
// Anchor: unique string "VM service not running. The service failed to start."
|
||||
//
|
||||
// The retry loop only retries on ENOENT (socket missing). On Linux,
|
||||
// stale sockets from a previous session give ECONNREFUSED instead,
|
||||
// which causes an immediate throw with no retry or auto-launch.
|
||||
//
|
||||
// Fix: patch the ENOENT check to also match ECONNREFUSED on Linux,
|
||||
// then inject auto-launch before the retry delay.
|
||||
//
|
||||
// The auto-launch uses a timestamp-based cooldown (_lastSpawn) instead
|
||||
// of a one-shot boolean so the daemon can be re-spawned after it dies
|
||||
// mid-session (issue #408). 10s cooldown prevents fork storms on hard
|
||||
// failures while allowing recovery on the next retry iteration.
|
||||
//
|
||||
// stdout/stderr of the forked daemon is piped to
|
||||
// ~/.config/Claude/logs/cowork_vm_daemon.log so crashes are no longer
|
||||
// silent. Falls back to "ignore" if the log dir can't be opened.
|
||||
// ============================================================
|
||||
const serviceErrorStr = 'VM service not running. The service failed to start.';
|
||||
const serviceErrorIdx = code.indexOf(serviceErrorStr);
|
||||
if (serviceErrorIdx !== -1) {
|
||||
// Step 1: Find the ENOENT check and expand it to include ECONNREFUSED
|
||||
// Pattern: VAR.code==="ENOENT"
|
||||
// Search backwards from the error string to find it
|
||||
const searchStart = Math.max(0, serviceErrorIdx - 300);
|
||||
const beforeRegion = code.substring(searchStart, serviceErrorIdx);
|
||||
const enoentRe = /(\w+)\.code\s*===\s*"ENOENT"/g;
|
||||
let enoentMatch;
|
||||
let lastEnoent = null;
|
||||
while ((enoentMatch = enoentRe.exec(beforeRegion)) !== null) {
|
||||
lastEnoent = enoentMatch;
|
||||
}
|
||||
if (lastEnoent) {
|
||||
const enoentStr = lastEnoent[0];
|
||||
const errVar = lastEnoent[1];
|
||||
const enoentAbsIdx = searchStart + lastEnoent.index;
|
||||
// Replace: VAR.code==="ENOENT"
|
||||
// With: (VAR.code==="ENOENT"||process.platform==="linux"&&VAR.code==="ECONNREFUSED")
|
||||
const expanded =
|
||||
'(' + enoentStr +
|
||||
'||process.platform==="linux"&&' + errVar + '.code==="ECONNREFUSED")';
|
||||
code = code.substring(0, enoentAbsIdx) +
|
||||
expanded +
|
||||
code.substring(enoentAbsIdx + enoentStr.length);
|
||||
console.log(' Expanded ENOENT check to include ECONNREFUSED on Linux');
|
||||
} else {
|
||||
console.log(' WARNING: Could not find ENOENT check for ECONNREFUSED expansion');
|
||||
}
|
||||
|
||||
// Step 2: Inject auto-launch before the retry delay
|
||||
// Re-find serviceErrorStr since indices shifted after step 1
|
||||
const newServiceErrorIdx = code.indexOf(serviceErrorStr);
|
||||
const searchEnd = Math.min(code.length, newServiceErrorIdx + 300);
|
||||
const searchRegion = code.substring(newServiceErrorIdx, searchEnd);
|
||||
const retryMatch = searchRegion.match(
|
||||
/await new Promise\((\w+)=>\s*setTimeout\(\1,\s*(\w+)\)\)/
|
||||
);
|
||||
if (retryMatch) {
|
||||
const retryStr = retryMatch[0];
|
||||
const retryOffset = searchRegion.indexOf(retryStr);
|
||||
const retryAbsIdx = newServiceErrorIdx + retryOffset;
|
||||
// Inject auto-launch before the retry delay
|
||||
// Service script is in app.asar.unpacked/ (not inside asar, since
|
||||
// child_process cannot execute scripts from inside an asar).
|
||||
// Uses fork() instead of spawn() because process.execPath in Electron
|
||||
// is the Electron binary - spawn would trigger "file open" handling
|
||||
// instead of executing the script as Node.js.
|
||||
const svcPath = process.env.SVC_PATH || 'cowork-vm-service.js';
|
||||
// Extract the enclosing function name (Ma or whatever it's
|
||||
// minified to) so the dedup guard attaches to it
|
||||
const funcSearchStart = Math.max(0, newServiceErrorIdx - 2000);
|
||||
const funcRegion = code.substring(funcSearchStart, newServiceErrorIdx);
|
||||
// The function is defined as: async function NAME(t,e){...for(let r=0;r<=LIMIT;r++)
|
||||
const funcNameRe = /async function (\w+)\s*\(\s*\w+\s*,\s*\w+\s*\)\s*\{[\s\S]*?for\s*\(\s*let/g;
|
||||
let funcMatch;
|
||||
let retryFuncName = null;
|
||||
while ((funcMatch = funcNameRe.exec(funcRegion)) !== null) {
|
||||
retryFuncName = funcMatch[1];
|
||||
}
|
||||
const spawnGuard = retryFuncName
|
||||
? retryFuncName + '._lastSpawn'
|
||||
: '_globalLastSpawn';
|
||||
// Cooldown in ms — long enough to avoid fork storms, short enough
|
||||
// that the retry loop can re-spawn after a mid-session daemon death.
|
||||
const autoLaunch =
|
||||
'process.platform==="linux"&&' +
|
||||
'(!' + spawnGuard + '||Date.now()-' + spawnGuard + '>1e4)' +
|
||||
'&&(' + spawnGuard + '=Date.now(),' +
|
||||
'(()=>{try{' +
|
||||
'const _p=require("path"),_fs=require("fs");' +
|
||||
'const _d=_p.join(process.resourcesPath,' +
|
||||
'"app.asar.unpacked","' + svcPath + '");' +
|
||||
'if(_fs.existsSync(_d)){' +
|
||||
// Open daemon log for append; fall back to ignoring stdio.
|
||||
'let _stdio="ignore";' +
|
||||
'try{' +
|
||||
'const _ld=_p.join(process.env.HOME||"/tmp",' +
|
||||
'".config/Claude/logs");' +
|
||||
'_fs.mkdirSync(_ld,{recursive:true});' +
|
||||
'const _fd=_fs.openSync(' +
|
||||
'_p.join(_ld,"cowork_vm_daemon.log"),"a");' +
|
||||
'_stdio=["ignore",_fd,_fd,"ipc"]' +
|
||||
'}catch(_){}' +
|
||||
'const _c=require("child_process").fork(_d,[],' +
|
||||
'{detached:true,stdio:_stdio,env:{...process.env,' +
|
||||
'ELECTRON_RUN_AS_NODE:"1"}});' +
|
||||
'global.__coworkDaemonPid=_c.pid;_c.unref()}' +
|
||||
'}catch(_e){console.error("[cowork-autolaunch]",_e)}})()),';
|
||||
code = code.substring(0, retryAbsIdx) +
|
||||
autoLaunch + code.substring(retryAbsIdx);
|
||||
console.log(' Added service daemon auto-launch on Linux');
|
||||
patchCount++;
|
||||
} else {
|
||||
console.log(' WARNING: Could not find retry delay for auto-launch patch');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find VM service error string for auto-launch');
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 6b: Extend auto-reinstall delete list (issue #408)
|
||||
// Anchor: const NAME=["rootfs.img",...] — the module-level array
|
||||
// driving the reinstall-files cleanup in _ue()/deleteVMBundle().
|
||||
//
|
||||
// Upstream preserves sessiondata.img and rootfs.img.zst across
|
||||
// auto-reinstall to avoid re-download. On 1.2773.0, preserving
|
||||
// them puts the daemon into an unstartable state that persists
|
||||
// across app restarts and OS reboots. Trade-off: next startup
|
||||
// re-downloads/re-extracts these files. This only runs on the
|
||||
// auto-reinstall path (already in a failed state), so biasing
|
||||
// toward recovery over re-download avoidance is correct.
|
||||
// ============================================================
|
||||
{
|
||||
const reinstallArrRe = /const (\w+)=\[("rootfs\.img"[^\]]*)\];/;
|
||||
const arrMatch = code.match(reinstallArrRe);
|
||||
if (arrMatch) {
|
||||
const [whole, name, contents] = arrMatch;
|
||||
const additions = [];
|
||||
if (!contents.includes('"sessiondata.img"')) {
|
||||
additions.push('"sessiondata.img"');
|
||||
}
|
||||
if (!contents.includes('"rootfs.img.zst"')) {
|
||||
additions.push('"rootfs.img.zst"');
|
||||
}
|
||||
if (additions.length) {
|
||||
const newContents = contents + ',' + additions.join(',');
|
||||
code = code.replace(
|
||||
whole,
|
||||
'const ' + name + '=[' + newContents + '];'
|
||||
);
|
||||
console.log(' Added VM images to reinstall delete list');
|
||||
patchCount++;
|
||||
} else {
|
||||
console.log(' Reinstall delete list already includes VM images');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find reinstall file list array');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 7: Skip Windows-specific smol-bin.vhdx copy on Linux
|
||||
// The code already checks: if(process.platform==="win32")
|
||||
// No change needed - win32-gated code is skipped on Linux.
|
||||
// ============================================================
|
||||
|
||||
// ============================================================
|
||||
// Patch 8: VM download tmpdir fix for Linux
|
||||
// On Linux, os.tmpdir() returns /tmp which is often a small
|
||||
// tmpfs (3-4GB). The VM rootfs download decompresses to ~9GB,
|
||||
// causing ENOSPC. Patch to use the bundle directory (on real
|
||||
// disk) instead of tmpfs for the download temp files.
|
||||
// Anchor: unique string "wvm-" in mkdtemp call
|
||||
// Strategy: find the bundle dir variable from nearby mkdir(),
|
||||
// then replace tmpdir() with that variable in the mkdtemp call.
|
||||
// ============================================================
|
||||
{
|
||||
// Find: MKDTEMP(PATH.join(OS.tmpdir(), "wvm-"))
|
||||
// The bundle dir var is used in mkdir(VAR, ...) just before
|
||||
const mkdtempRe = /(\w+)\.mkdtemp\(\s*(\w+)\.join\(\s*(\w+)\.tmpdir\(\)\s*,\s*"wvm-"\s*\)\s*\)/;
|
||||
const mkdtempMatch = code.match(mkdtempRe);
|
||||
if (mkdtempMatch) {
|
||||
const [fullMatch, fsVar, pathVar, osVar] = mkdtempMatch;
|
||||
// Find the bundle dir variable: mkdir(VAR, { recursive before wvm-
|
||||
const mkdtempIdx = code.indexOf(fullMatch);
|
||||
const searchStart = Math.max(0, mkdtempIdx - 2000);
|
||||
const before = code.substring(searchStart, mkdtempIdx);
|
||||
// Look for: mkdir(VARNAME, { recursive
|
||||
const mkdirRe = /(\w+)\.mkdir\(\s*(\w+)\s*,\s*\{\s*recursive/g;
|
||||
let bundleVar = null;
|
||||
let lastMkdir;
|
||||
while ((lastMkdir = mkdirRe.exec(before)) !== null) {
|
||||
bundleVar = lastMkdir[2];
|
||||
}
|
||||
if (bundleVar) {
|
||||
// Replace os.tmpdir() with the bundle dir variable
|
||||
// On Linux, use the bundle dir; on other platforms keep tmpdir
|
||||
const replacement =
|
||||
`${fsVar}.mkdtemp(${pathVar}.join(` +
|
||||
`process.platform==="linux"?${bundleVar}:${osVar}.tmpdir(),` +
|
||||
`"wvm-"))`;
|
||||
code = code.substring(0, mkdtempIdx) + replacement +
|
||||
code.substring(mkdtempIdx + fullMatch.length);
|
||||
console.log(' Patched VM download temp dir to use bundle path on Linux');
|
||||
patchCount++;
|
||||
} else {
|
||||
console.log(' WARNING: Could not find bundle dir variable for tmpdir patch');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find mkdtemp("wvm-") for tmpdir patch');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 9: Copy smol-bin VHDX on Linux
|
||||
// The win32 block copies smol-bin then calls _.configure()
|
||||
// (Windows HCS setup) which causes "Request timed out" on
|
||||
// Linux (#315). Inject a separate Linux block after the win32
|
||||
// block that only does the smol-bin copy.
|
||||
// Variable names are extracted dynamically from the win32 block
|
||||
// since minified names change between releases (#344).
|
||||
// ============================================================
|
||||
{
|
||||
const anchor = '"[VM:start] Windows VM service configured"';
|
||||
const anchorIdx = code.indexOf(anchor);
|
||||
if (anchorIdx !== -1) {
|
||||
// Find the "}" closing the win32 if-block after the anchor
|
||||
const closingBrace = code.indexOf('}', anchorIdx + anchor.length);
|
||||
if (closingBrace !== -1) {
|
||||
// Extract minified variable names from the win32 block
|
||||
// Search backwards from anchor to find the win32 block
|
||||
const regionStart = Math.max(0, anchorIdx - 1000);
|
||||
const region = code.substring(regionStart, anchorIdx);
|
||||
|
||||
// JS identifier may start with $, _, or letter; \w doesn't
|
||||
// match $ so use [$\w]+ to capture vars like `$e` (Claude
|
||||
// >= 1.3109.0 uses $e for the fs module to avoid collision
|
||||
// with the parameter `e`). See issue #418.
|
||||
// path var: VAR.join(process.resourcesPath,
|
||||
const pathMatch = region.match(
|
||||
/([$\w]+)\.join\(\s*process\.resourcesPath\s*,/
|
||||
);
|
||||
// fs var: VAR.existsSync(
|
||||
const fsMatch = region.match(/([$\w]+)\.existsSync\(/);
|
||||
// logger var: VAR.info("[VM:start]
|
||||
const logMatch = region.match(
|
||||
/([$\w]+)\.info\(\s*[`"]\[VM:start\]/
|
||||
);
|
||||
// stream/pipeline var: VAR.pipeline(
|
||||
const streamMatch = region.match(/([$\w]+)\.pipeline\(/);
|
||||
// arch function: const VAR=FUNC(), used in smol-bin
|
||||
const archMatch = region.match(
|
||||
/const\s+([$\w]+)\s*=\s*([$\w]+)\(\)\s*,\s*[$\w]+\s*=\s*[$\w]+\.join/
|
||||
);
|
||||
// bundlePath var: PATH.join(VAR,"smol-bin.vhdx")
|
||||
const bundleMatch = region.match(
|
||||
/\.join\(\s*([$\w]+)\s*,\s*"smol-bin\.vhdx"\s*\)/
|
||||
);
|
||||
|
||||
if (pathMatch && fsMatch && logMatch &&
|
||||
streamMatch && archMatch && bundleMatch) {
|
||||
const pathVar = pathMatch[1];
|
||||
const fsVar = fsMatch[1];
|
||||
const logVar = logMatch[1];
|
||||
const streamVar = streamMatch[1];
|
||||
const archFunc = archMatch[2];
|
||||
const bundleVar = bundleMatch[1];
|
||||
|
||||
const linuxBlock =
|
||||
'if(process.platform==="linux"){' +
|
||||
'const _la=' + archFunc + '(),' +
|
||||
'_ls=' + pathVar + '.join(process.resourcesPath,' +
|
||||
'`smol-bin.${_la}.vhdx`),' +
|
||||
'_ld=' + pathVar + '.join(' + bundleVar +
|
||||
',"smol-bin.vhdx");' +
|
||||
fsVar + '.existsSync(_ls)?' +
|
||||
'(' + logVar + '.info(' +
|
||||
'`[VM:start] Copying smol-bin.${_la}' +
|
||||
'.vhdx to bundle (Linux)`),' +
|
||||
'await ' + streamVar + '.pipeline(' +
|
||||
fsVar + '.createReadStream(_ls),' +
|
||||
fsVar + '.createWriteStream(_ld)),' +
|
||||
logVar + '.info(' +
|
||||
'`[VM:start] smol-bin.${_la}' +
|
||||
'.vhdx copied successfully`))' +
|
||||
':' + logVar + '.warn(' +
|
||||
'`[VM:start] smol-bin.${_la}' +
|
||||
'.vhdx not found at ${_ls}`)' +
|
||||
'}';
|
||||
// Defensive: if a future upstream emits its own
|
||||
// if(process.platform==="linux"){...} block right
|
||||
// after the win32 close brace, strip it before
|
||||
// injecting our correctly-wired linuxBlock so we
|
||||
// don't end up with two competing blocks.
|
||||
const insertPos = closingBrace + 1;
|
||||
let stripUntil = insertPos;
|
||||
const afterWin32 = code.substring(insertPos);
|
||||
const upstreamRe = /^\s*if\s*\(\s*process\.platform\s*===\s*"linux"\s*\)\s*\{/;
|
||||
const upstreamMatch = afterWin32.match(upstreamRe);
|
||||
if (upstreamMatch) {
|
||||
const matchEnd = insertPos + upstreamMatch[0].length;
|
||||
let depth = 1, pos = matchEnd;
|
||||
while (depth > 0 && pos < code.length) {
|
||||
if (code[pos] === '{') depth++;
|
||||
else if (code[pos] === '}') depth--;
|
||||
pos++;
|
||||
}
|
||||
if (depth === 0) {
|
||||
stripUntil = pos;
|
||||
console.log(' Stripped pre-existing upstream Linux block');
|
||||
} else {
|
||||
console.log(' WARNING: Upstream Linux block found but braces unbalanced; not stripping');
|
||||
}
|
||||
}
|
||||
code = code.substring(0, insertPos) +
|
||||
linuxBlock +
|
||||
code.substring(stripUntil);
|
||||
console.log(' Injected Linux smol-bin copy block (skips _.configure)');
|
||||
console.log(` vars: path=${pathVar} fs=${fsVar} log=${logVar} stream=${streamVar} arch=${archFunc} bundle=${bundleVar}`);
|
||||
patchCount++;
|
||||
} else {
|
||||
const missing = [];
|
||||
if (!pathMatch) missing.push('path');
|
||||
if (!fsMatch) missing.push('fs');
|
||||
if (!logMatch) missing.push('logger');
|
||||
if (!streamMatch) missing.push('stream');
|
||||
if (!archMatch) missing.push('arch');
|
||||
if (!bundleMatch) missing.push('bundlePath');
|
||||
console.log(` WARNING: Could not extract minified variable(s): ${missing.join(', ')}`);
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find closing brace after Windows VM service anchor');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find Windows VM service anchor for smol-bin patch');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 10: Register quit handler for cowork daemon cleanup
|
||||
// The upstream vm-shutdown handler uses a Swift addon unavailable
|
||||
// on Linux. Register our own to SIGTERM the daemon on app quit.
|
||||
// ============================================================
|
||||
{
|
||||
const quitFnRe = /registerQuitHandler:\s*(\w+)/;
|
||||
const quitFnMatch = code.match(quitFnRe);
|
||||
if (quitFnMatch) {
|
||||
const quitFn = quitFnMatch[1];
|
||||
console.log(' Found registerQuitHandler function: ' + quitFn);
|
||||
|
||||
const quitFnDef = 'function ' + quitFn + '(';
|
||||
const quitFnDefIdx = code.indexOf(quitFnDef);
|
||||
if (quitFnDefIdx !== -1) {
|
||||
const fnBlock = extractBlock(code, quitFnDefIdx, '{');
|
||||
if (fnBlock) {
|
||||
const insertIdx = code.indexOf(fnBlock, quitFnDefIdx) +
|
||||
fnBlock.length;
|
||||
const shutdownHandler =
|
||||
'process.platform==="linux"&&' + quitFn + '({' +
|
||||
'name:"cowork-linux-daemon-shutdown",' +
|
||||
'fn:async()=>{' +
|
||||
'const _p=global.__coworkDaemonPid;' +
|
||||
'if(!_p)return;' +
|
||||
'try{const _cmd=require("fs").readFileSync(' +
|
||||
'"/proc/"+_p+"/cmdline","utf8");' +
|
||||
'if(!_cmd.includes("cowork-vm-service"))return' +
|
||||
'}catch(_e){return}' +
|
||||
'try{process.kill(_p,"SIGTERM")}catch(_e){return}' +
|
||||
'for(let _i=0;_i<50;_i++){' +
|
||||
'await new Promise(_r=>setTimeout(_r,200));' +
|
||||
'try{process.kill(_p,0)}catch(_e){return}' +
|
||||
'}}});';
|
||||
code = code.substring(0, insertIdx) +
|
||||
shutdownHandler + code.substring(insertIdx);
|
||||
console.log(' Registered Linux cowork daemon quit handler');
|
||||
patchCount++;
|
||||
} else {
|
||||
console.log(' WARNING: Could not find ' + quitFn +
|
||||
' function body for quit handler');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find ' + quitFn +
|
||||
' function definition');
|
||||
}
|
||||
} else {
|
||||
console.log(' WARNING: Could not find registerQuitHandler' +
|
||||
' export for quit handler');
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Patch 12: Forward user-selected folder as sharedCwdPath (#412)
|
||||
// The cowork-vm-service daemon honors a sharedCwdPath field on
|
||||
// the spawn IPC payload with priority over cwd (resolveWorkDir
|
||||
// in scripts/cowork-vm-service.js), but upstream never populates
|
||||
// it on Linux, so the daemon falls back to mountMap heuristics
|
||||
// (#389/#392/#411). Thread the user's folder through three sites:
|
||||
// 12a. getVMSpawnFunction({...}) config — inject sharedCwdPath.
|
||||
// 12b. Kyr() -> VMClient.spawn() call — forward as 13th arg.
|
||||
// 12c. spawn() body — accept trailing param, set on IPC payload.
|
||||
// Daemon-side mount heuristic from #392 remains as fallback.
|
||||
// ============================================================
|
||||
{
|
||||
// --- 12a: inject sharedCwdPath into getVMSpawnFunction config ---
|
||||
let site1Done = false;
|
||||
const cfgAnchor = 'this.getVMSpawnFunction(';
|
||||
const cfgIdx = code.indexOf(cfgAnchor);
|
||||
if (cfgIdx === -1) {
|
||||
console.log(' WARNING: #412 getVMSpawnFunction anchor not found');
|
||||
} else {
|
||||
// The argument is a {...} object literal; extract it directly.
|
||||
const cfgBlock = extractBlock(code, cfgIdx + cfgAnchor.length, '{');
|
||||
if (!cfgBlock) {
|
||||
console.log(' WARNING: #412 getVMSpawnFunction {...} not found');
|
||||
} else if (cfgBlock.includes('sharedCwdPath')) {
|
||||
console.log(' #412 sharedCwdPath already in spawn config');
|
||||
site1Done = true;
|
||||
} else {
|
||||
// The session-id var is the value of the first field
|
||||
// 'sessionId:VAR' in the config itself — cheap, scoped, and
|
||||
// immune to unrelated *.userSelectedFolders references (e.g.
|
||||
// loop variables) that wander into the enclosing scope.
|
||||
const sidMatch = cfgBlock.match(/\{sessionId:(\w+)\b/);
|
||||
if (!sidMatch) {
|
||||
console.log(' WARNING: #412 no sessionId field in config');
|
||||
} else {
|
||||
const sidVar = sidMatch[1];
|
||||
// Route through this.sessions.get() — canonical accessor
|
||||
// the same class already uses, so the injection survives
|
||||
// re-orderings of local vars in the enclosing function.
|
||||
const blockStart = code.indexOf(cfgBlock, cfgIdx);
|
||||
const insertAt = blockStart + cfgBlock.length - 1;
|
||||
const insertion = ',sharedCwdPath:this.sessions.get(' +
|
||||
sidVar + ')?.userSelectedFolders?.[0]';
|
||||
code = code.substring(0, insertAt) +
|
||||
insertion + code.substring(insertAt);
|
||||
console.log(' Injected sharedCwdPath into spawn' +
|
||||
' config (sessionId var: ' + sidVar + ')');
|
||||
patchCount++;
|
||||
site1Done = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- 12c: accept a 13th param in spawn() method body ---
|
||||
let site3Done = false;
|
||||
const spawnIdempotent =
|
||||
/async spawn\([^)]+\)\{const \w+=\{id:[^}]+\};[^{}]*\.sharedCwdPath=/;
|
||||
if (spawnIdempotent.test(code)) {
|
||||
console.log(' #412 spawn method already accepts sharedCwdPath');
|
||||
site3Done = true;
|
||||
} else {
|
||||
// Match the spawn body with the trailing mountConda setter and the
|
||||
// IPC call. Captures: arg list, payload var, setter chain, IPC tail.
|
||||
const spawnRe =
|
||||
/async spawn\(([^)]+)\)\{const (\w+)=\{id:[^}]+\};([^{}]*?\w+&&\(\2\.mountConda=\w+\)),(await \w+\("spawn",\2\)\})/;
|
||||
const spawnMatch = code.match(spawnRe);
|
||||
if (!spawnMatch) {
|
||||
console.log(' WARNING: #412 spawn method body regex did not match');
|
||||
} else {
|
||||
const [whole, argList, payloadVar, setters, tail] = spawnMatch;
|
||||
const argNames = new Set(argList.split(',').map(s =>
|
||||
s.split('=')[0].trim()));
|
||||
let param = null;
|
||||
for (const c of 'hHpPqQxXyYzZkKmMwW') {
|
||||
if (!argNames.has(c)) { param = c; break; }
|
||||
}
|
||||
if (!param) {
|
||||
console.log(' WARNING: #412 no unused letter for spawn param');
|
||||
} else {
|
||||
const newSetters = setters + ',' + param + '&&(' +
|
||||
payloadVar + '.sharedCwdPath=' + param + ')';
|
||||
const assembled = whole
|
||||
.replace('async spawn(' + argList + ')',
|
||||
'async spawn(' + argList + ',' + param + ')')
|
||||
.replace(setters + ',' + tail, newSetters + ',' + tail);
|
||||
code = code.slice(0, spawnMatch.index) + assembled +
|
||||
code.slice(spawnMatch.index + whole.length);
|
||||
console.log(' Extended spawn() with ' + param +
|
||||
' -> ' + payloadVar + '.sharedCwdPath setter');
|
||||
patchCount++;
|
||||
site3Done = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- 12b: forward SESSION.sharedCwdPath in Kyr -> spawn() call ---
|
||||
// Anchor: ',VAR.mountConda)' — expected unique to the 12-arg caller
|
||||
// (the shorter 10-arg one-shot call sites lack mountConda). Assert
|
||||
// the uniqueness so a second upstream caller wouldn't silently take
|
||||
// only the first hit.
|
||||
let site2Done = false;
|
||||
if (/,\w+\.mountConda,\w+\.sharedCwdPath\)/.test(code)) {
|
||||
console.log(' #412 caller already forwards sharedCwdPath');
|
||||
site2Done = true;
|
||||
} else {
|
||||
const callMatches = [...code.matchAll(/,(\w+)\.mountConda\)/g)];
|
||||
if (callMatches.length === 0) {
|
||||
console.log(' WARNING: #412 no ",VAR.mountConda)" pattern found');
|
||||
} else if (callMatches.length > 1) {
|
||||
console.log(' WARNING: #412 expected 1 ",VAR.mountConda)" match,' +
|
||||
' found ' + callMatches.length + '; skipping to avoid' +
|
||||
' wrong-site forwarding');
|
||||
} else {
|
||||
const [whole, sessionVar] = callMatches[0];
|
||||
code = code.replace(whole, ',' + sessionVar +
|
||||
'.mountConda,' + sessionVar + '.sharedCwdPath)');
|
||||
console.log(' Forwarded sharedCwdPath in Kyr->spawn call' +
|
||||
' (var: ' + sessionVar + ')');
|
||||
patchCount++;
|
||||
site2Done = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!site1Done || !site2Done || !site3Done) {
|
||||
console.log(' WARNING: #412 partial — site1=' + site1Done +
|
||||
' site2=' + site2Done + ' site3=' + site3Done +
|
||||
'; daemon fallback still active');
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(indexJs, code);
|
||||
console.log(` Applied ${patchCount} cowork patches`);
|
||||
if (patchCount < 5) {
|
||||
console.log(' WARNING: Some patches failed - Cowork mode may not work');
|
||||
}
|
||||
COWORK_PATCH
|
||||
then
|
||||
echo 'WARNING: Cowork Linux patches failed' >&2
|
||||
echo 'Cowork mode may not be available on Linux' >&2
|
||||
fi
|
||||
|
||||
echo '##############################################################'
|
||||
}
|
||||
|
||||
#######################################
# Stage node-pty (JS sources + native build artifacts) into the
# app.asar.contents staging tree so the packaged app has terminal support.
# Uses a pre-built tree when $node_pty_dir is set (e.g. supplied from Nix);
# otherwise compiles node-pty from npm in a scratch dir under $work_dir.
# Globals:
#   node_pty_dir (read)        - optional path to a pre-built node-pty
#   work_dir (read)            - scratch area for the npm build
#   app_staging_dir (read)     - staging tree receiving the files
#   node_pty_build_dir (written) - npm build dir; NOTE(review): assigned
#     without 'local', so it leaks into the caller's scope — confirm this
#     is an intentional cross-module contract
# Outputs: progress messages on stdout
# Returns: 0 on success or best-effort skip; exits non-zero on staging errors
#######################################
install_node_pty() {
    section_header 'Installing node-pty for terminal support'

    local pty_src_dir=''

    if [[ -n $node_pty_dir ]]; then
        # Use pre-built node-pty (e.g. from Nix)
        echo "Using pre-built node-pty from $node_pty_dir"
        pty_src_dir="$node_pty_dir"
    else
        # Build node-pty from npm
        node_pty_build_dir="$work_dir/node-pty-build"
        mkdir -p "$node_pty_build_dir" || exit 1
        cd "$node_pty_build_dir" || exit 1
        # Minimal manifest so npm installs into this scratch dir.
        echo '{"name":"node-pty-build","version":"1.0.0","private":true}' > package.json

        echo 'Installing node-pty (this compiles native module)...'
        if npm install node-pty 2>&1; then
            echo 'node-pty installed successfully'
            pty_src_dir="$node_pty_build_dir/node_modules/node-pty"
        else
            # Best-effort: leave pty_src_dir empty and keep building;
            # the app works without terminal features.
            echo 'Failed to install node-pty - terminal features may not work'
        fi
    fi

    if [[ -n $pty_src_dir && -d $pty_src_dir ]]; then
        echo 'Copying node-pty JavaScript files into app.asar.contents...'
        mkdir -p "$app_staging_dir/app.asar.contents/node_modules/node-pty" || exit 1
        # --no-preserve=mode so read-only bits from the Nix store
        # (--node-pty-dir) don't propagate into the staging tree.
        cp -r --no-preserve=mode "$pty_src_dir/lib" \
            "$app_staging_dir/app.asar.contents/node_modules/node-pty/" || exit 1
        cp --no-preserve=mode "$pty_src_dir/package.json" \
            "$app_staging_dir/app.asar.contents/node_modules/node-pty/" || exit 1
        # Also stage build/ so `asar pack --unpack '**/*.node'` can
        # create a properly-tracked .unpacked entry. Without this,
        # the asar manifest has no node-pty/build/ entry and
        # Electron's asar->.unpacked redirect never fires, so
        # require('../build/Release/pty.node') from inside the asar
        # fails with MODULE_NOT_FOUND even when the binary exists
        # in app.asar.unpacked/.
        if [[ -d $pty_src_dir/build ]]; then
            cp -r --no-preserve=mode "$pty_src_dir/build" \
                "$app_staging_dir/app.asar.contents/node_modules/node-pty/" || exit 1
            echo 'node-pty build/ staged (will be unpacked during asar pack)'
        fi
        echo 'node-pty JavaScript files copied'
    elif [[ -z $pty_src_dir ]]; then
        echo 'node-pty source directory not set'
    else
        echo "node-pty directory not found: $pty_src_dir"
    fi

    # The npm-build branch cd'd into the scratch dir; always return
    # to the staging dir for subsequent build steps.
    cd "$app_staging_dir" || exit 1
    section_footer 'node-pty installation'
}
|
||||
143
scripts/patches/quick-window.sh
Normal file
143
scripts/patches/quick-window.sh
Normal file
@@ -0,0 +1,143 @@
|
||||
#===============================================================================
|
||||
# Quick-window patches: KDE-gated blur/focus workarounds for the pop-up menu
|
||||
# so the main window reappears after quick-entry submit.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: (none — all context is captured from index.js at runtime)
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
#######################################
# Apply Linux quick-window (pop-up entry) fixes to the minified index.js.
# Part 1 (shell/sed): add a KDE-gated blur() before hide() on the quick
#   window so isFocused() doesn't return stale true after hiding.
# Part 2 (embedded node script): KDE-gate the main-window show() path to
#   use a visibility check instead of the stale focus check.
# All context (minified variable/function names) is re-derived from
# index.js at run time; no build globals are read or written.
# Best-effort: on any extraction failure the function logs a WARNING
# and returns 0 rather than aborting the build.
#######################################
patch_quick_window() {
    echo 'Patching quick window for Linux...'
    local index_js='app.asar.contents/.vite/build/index.js'

    # Extract the quick window variable name from the unique "pop-up-menu"
    # setAlwaysOnTop call, e.g.: Sa.setAlwaysOnTop(!0,"pop-up-menu")
    local quick_var
    quick_var=$(grep -oP '\w+(?=\.setAlwaysOnTop\(\s*!0\s*,\s*"pop-up-menu"\))' \
        "$index_js" | head -1)
    if [[ -z $quick_var ]]; then
        echo 'WARNING: Could not extract quick window variable name'
        echo '##############################################################'
        return
    fi
    echo " Found quick window variable: $quick_var"

    # Minified names may contain '$' (e.g. `$e`); escape it for use
    # inside grep -P / sed patterns below.
    local quick_var_re="${quick_var//\$/\\$}"

    # Part 1: Add blur() before hide() on the quick window so that
    # isFocused() returns false after hiding (Electron Linux bug on KDE).
    # The hide call sits after || (e.g. GUARD()||VAR.hide()), so both
    # calls must be wrapped in parens to preserve short-circuit semantics.
    # Gated to KDE only: on GNOME/Ubuntu the blur() regresses quick entry
    # (see #393), and the focus-stale bug doesn't manifest there.
    local de_check='(process.env.XDG_CURRENT_DESKTOP||"")'
    de_check+='.toLowerCase().includes("kde")'
    if grep -qF "${quick_var}.blur(),${quick_var}.hide()" "$index_js"; then
        # Idempotency: the blur(),hide() pair only exists post-patch.
        echo ' Quick window blur already patched'
    elif grep -qP "\|\|${quick_var_re}\.hide\(\)" "$index_js"; then
        sed -i \
            "s/||${quick_var_re}\.hide()/||(${de_check}?(${quick_var}.blur(),${quick_var}.hide()):${quick_var}.hide())/g" \
            "$index_js"
        echo ' Added KDE-gated blur() before hide() on quick window'
    else
        echo ' WARNING: Could not find quick window hide() call'
    fi

    # Part 2: Fix main window not appearing after quick entry submit.
    # On KDE, isFocused() can return stale true after hiding, causing
    # FOCUS_CHECK()||Lt.show() to skip the show. Gate the visibility-check
    # replacement to KDE only: on GNOME, the original focus check works
    # and replacing it regresses quick entry (see #393).
    # The heredoc is quoted ('QUICK_WINDOW_PATCH') so the node script is
    # passed literally; the target file is handed over via $INDEX_JS.
    if INDEX_JS="$index_js" node << 'QUICK_WINDOW_PATCH'
const fs = require('fs');
const indexJs = process.env.INDEX_JS;
let code = fs.readFileSync(indexJs, 'utf8');
let patchCount = 0;

// Find the minified isWindowFocused function via its named property
// export: isWindowFocused: () => !!NAME()
const focusedPropRe = /isWindowFocused:\s*\(\)\s*=>\s*!!(\w+)\(\)/;
const focusedMatch = code.match(focusedPropRe);
if (!focusedMatch) {
  console.log(' WARNING: Could not find isWindowFocused function');
  process.exit(0);
}
const focusFn = focusedMatch[1];
console.log(' Found focus check function: ' + focusFn);

// Find the sibling isVisible function defined near the focus function
const focusFnIdx = code.indexOf('function ' + focusFn + '(');
const nearbyCode = code.substring(focusFnIdx, focusFnIdx + 500);
const visFnRe = /function (\w+)\(\)\{return!\w+\|\|\w+\.isDestroyed\(\)\?!1:\w+\.isVisible\(\)/;
const visMatch = nearbyCode.match(visFnRe);
if (!visMatch) {
  console.log(' WARNING: Could not find visibility function near ' +
    focusFn);
  process.exit(0);
}
const visFn = visMatch[1];
console.log(' Found visibility check function: ' + visFn);

// Anchor on unique QuickEntry log strings to patch only the right sites
const anchors = [
  'Navigating to existing chat',
  'Creating new chat with submit_quick_entry',
];
for (const anchor of anchors) {
  const anchorIdx = code.indexOf(anchor);
  if (anchorIdx === -1) {
    console.log(' WARNING: anchor not found: ' + anchor);
    continue;
  }
  // Search region after anchor (1500 chars covers promise chains)
  const region = code.substring(anchorIdx, anchorIdx + 1500);
  // Idempotency: if region already contains the DE gate, skip
  if (region.indexOf('XDG_CURRENT_DESKTOP') !== -1) {
    console.log(' Quick entry show() already patched near "' +
      anchor.substring(0, 30) + '..."');
    continue;
  }
  const showRe = new RegExp(
    focusFn.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') +
    '\\(\\)\\|\\|(\\w+)\\.show\\(\\)'
  );
  const showMatch = region.match(showRe);
  if (showMatch) {
    const oldStr = showMatch[0];
    const mainWin = showMatch[1];
    // Gate the visibility check to KDE only; fall back to original
    // focus check on GNOME/other so #390 doesn't regress them (#393).
    const deCheck = '(process.env.XDG_CURRENT_DESKTOP||"")' +
      '.toLowerCase().includes("kde")';
    const newStr = '(' + deCheck + '?' + visFn + '():' +
      focusFn + '())||' + mainWin + '.show()';
    if (oldStr !== newStr) {
      const absIdx = anchorIdx + region.indexOf(oldStr);
      code = code.substring(0, absIdx) + newStr +
        code.substring(absIdx + oldStr.length);
      console.log(' KDE-gated ' + focusFn + '()/' + visFn +
        '() for show() near "' + anchor.substring(0, 30) + '..."');
      patchCount++;
    }
  } else {
    console.log(' WARNING: show() pattern not found near "' +
      anchor + '"');
  }
}

if (patchCount > 0) {
  fs.writeFileSync(indexJs, code);
  console.log(' Patched ' + patchCount +
    ' quick entry show() calls to use visibility check');
} else {
  console.log(' WARNING: No quick entry show() calls patched');
}
QUICK_WINDOW_PATCH
    then
        echo 'Quick window patches applied'
    else
        echo 'WARNING: Quick window show patch failed' >&2
    fi
    echo '##############################################################'
}
|
||||
44
scripts/patches/titlebar.sh
Normal file
44
scripts/patches/titlebar.sh
Normal file
@@ -0,0 +1,44 @@
|
||||
#===============================================================================
|
||||
# Title bar detection patch: strip the negation so Linux renders the frame.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: (none)
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
#######################################
# Enable the native title bar: strip the leading '!' from the minified
# `if(!isWindows&&isMainWindow)` guard in the MainWindowPage bundle.
# Exits non-zero when zero or multiple candidate files are found, or
# when the replacement leaves any negated guard behind.
#######################################
patch_titlebar_detection() {
    echo '##############################################################'
    echo "Removing '!' from 'if (\"!\"isWindows && isMainWindow) return null;'"
    echo 'detection flag to enable title bar'

    local assets_dir='app.asar.contents/.vite/renderer/main_window/assets'
    local page_glob='MainWindowPage-*.js'

    echo "Searching for '$page_glob' within '$assets_dir'..."
    local matches
    mapfile -t matches < <(find "$assets_dir" -type f -name "$page_glob")
    local match_count=${#matches[@]}

    # Guard clauses: exactly one bundle file is expected.
    if (( match_count == 0 )); then
        echo "Error: No file matching '$page_glob' found within '$assets_dir'." >&2
        exit 1
    elif (( match_count > 1 )); then
        echo "Error: Expected exactly one file matching '$page_glob' within '$assets_dir', but found $match_count." >&2
        exit 1
    fi

    local page_file="${matches[0]}"
    echo "Found target file: $page_file"
    # Drop the negation in every minified `if(!A&&B)` guard.
    sed -i -E 's/if\(!([a-zA-Z]+)[[:space:]]*&&[[:space:]]*([a-zA-Z]+)\)/if(\1 \&\& \2)/g' "$page_file"

    # Verify nothing matching the negated form survived the rewrite.
    if grep -q -E 'if\(![a-zA-Z]+[[:space:]]*&&[[:space:]]*[a-zA-Z]+\)' "$page_file"; then
        echo "Error: Failed to replace patterns in $page_file." >&2
        exit 1
    fi
    echo "Successfully replaced patterns in $page_file"
    echo '##############################################################'
}
|
||||
120
scripts/patches/tray.sh
Normal file
120
scripts/patches/tray.sh
Normal file
@@ -0,0 +1,120 @@
|
||||
#===============================================================================
|
||||
# Tray-related patches: menu handler mutex/DBus delay, icon theme selection,
|
||||
# and menuBarEnabled default.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: project_root, electron_var, electron_var_re
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
#######################################
# Patch the tray menu rebuild handler in the minified index.js:
#   1. make the rebuild function async,
#   2. add a mutex guard so concurrent tray rebuilds are coalesced,
#   3. insert a 250ms delay after tray destroy() so the DBus name is
#      released before the tray is recreated,
#   4. skip theme-triggered rebuilds during the first 3s of startup.
# Globals: project_root (read, on error paths), electron_var_re (read).
# Exits non-zero when the expected minified patterns are missing.
#######################################
patch_tray_menu_handler() {
    echo 'Patching tray menu handler...'
    local index_js='app.asar.contents/.vite/build/index.js'

    local tray_func tray_var first_const
    # The handler is registered as: on("menuBarEnabled",()=>{FUNC()})
    tray_func=$(grep -oP \
        'on\("menuBarEnabled",\(\)=>\{\K\w+(?=\(\)\})' "$index_js")
    if [[ -z $tray_func ]]; then
        echo 'Failed to extract tray menu function name' >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo " Found tray function: $tray_func"

    # The tray instance lives in: });let VAR=null;function FUNC...
    tray_var=$(grep -oP \
        "\}\);let \K\w+(?==null;(?:async )?function ${tray_func})" \
        "$index_js")
    if [[ -z $tray_var ]]; then
        echo 'Failed to extract tray variable name' >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo " Found tray variable: $tray_var"

    # Make the rebuild function async so the DBus delay can be awaited.
    # Bug fix: guard the sed so a re-run does not stack a second
    # "async " prefix onto an already-async definition.
    if ! grep -qF "async function ${tray_func}(){" "$index_js"; then
        sed -i "s/function ${tray_func}(){/async function ${tray_func}(){/g" \
            "$index_js"
    fi

    # Sanity probe: the function body must open with a const declaration,
    # confirming the minified structure matches expectations.
    first_const=$(grep -oP \
        "async function ${tray_func}\(\)\{.*?const \K\w+(?==)" \
        "$index_js" | head -1)
    if [[ -z $first_const ]]; then
        echo 'Failed to extract first const in function' >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo " Found first const variable: $first_const"

    # Add mutex guard to prevent concurrent tray rebuilds
    if ! grep -q "${tray_func}._running" "$index_js"; then
        sed -i "s/async function ${tray_func}(){/async function ${tray_func}(){if(${tray_func}._running)return;${tray_func}._running=true;setTimeout(()=>${tray_func}._running=false,1500);/g" \
            "$index_js"
        echo " Added mutex guard to ${tray_func}()"
    fi

    # Add DBus cleanup delay after tray destroy.
    # Bug fix: the old guard piped `grep -q` (which prints nothing) into a
    # second `grep -q`, so the second grep always failed on empty input and
    # the negated pipeline was always true — the idempotency check never
    # detected a prior patch. Probe for the patched marker directly instead.
    if ! grep -qF "${tray_var}=null,await new Promise" "$index_js"; then
        sed -i "s/${tray_var}\&\&(${tray_var}\.destroy(),${tray_var}=null)/${tray_var}\&\&(${tray_var}.destroy(),${tray_var}=null,await new Promise(r=>setTimeout(r,250)))/g" \
            "$index_js"
        echo " Added DBus cleanup delay after $tray_var.destroy()"
    fi

    echo 'Tray menu handler patched'
    echo '##############################################################'

    # Skip tray updates during startup (3 second window)
    echo 'Patching nativeTheme handler for startup delay...'
    if ! grep -q '_trayStartTime' "$index_js"; then
        sed -i -E \
            "s/(${electron_var_re}\.nativeTheme\.on\(\s*\"updated\"\s*,\s*\(\)\s*=>\s*\{)/let _trayStartTime=Date.now();\1/g" \
            "$index_js"
        sed -i -E \
            "s/\((\w+\([^)]*\))\s*,\s*${tray_func}\(\)\s*,/(\1,Date.now()-_trayStartTime>3e3\&\&${tray_func}(),/g" \
            "$index_js"
        echo ' Added startup delay check (3 second window)'
    fi
    echo '##############################################################'
}
|
||||
|
||||
#######################################
# Make the tray icon theme-aware: rewrite the hard-coded
# "TrayIconTemplate.png" assignment into a shouldUseDarkColors ternary.
# Globals: electron_var_re (read) — minified electron module name.
#######################################
patch_tray_icon_selection() {
    echo 'Patching tray icon selection for Linux visibility...'
    local bundle_js='app.asar.contents/.vite/build/index.js'
    local theme_check="${electron_var_re}.nativeTheme.shouldUseDarkColors"

    # Minified target: :VAR="TrayIconTemplate.png" (VAR may start with $).
    if ! grep -qP ':\$?\w+="TrayIconTemplate\.png"' "$bundle_js"; then
        echo 'Tray icon selection pattern not found or already patched'
    else
        sed -i -E \
            "s/:(\\\$?\w+)=\"TrayIconTemplate\.png\"/:\1=${theme_check}?\"TrayIconTemplate-Dark.png\":\"TrayIconTemplate.png\"/g" \
            "$bundle_js"
        echo 'Patched tray icon selection for Linux theme support'
    fi
    echo '##############################################################'
}
|
||||
|
||||
#######################################
# Default the menu bar to visible: rewrite `!!VAR` (falsy when the
# setting is unset) into `VAR!==false` so only an explicit false hides it.
# Best-effort: logs and returns when the minified patterns are absent.
#######################################
patch_menu_bar_default() {
    echo 'Patching menuBarEnabled to default to true when unset...'
    local bundle_js='app.asar.contents/.vite/build/index.js'

    # The settings read is minified as: const VAR=GETTER("menuBarEnabled")
    local mb_var
    mb_var=$(grep -oP \
        'const \K\w+(?=\s*=\s*\w+\("menuBarEnabled"\))' \
        "$bundle_js" | head -1)
    if [[ -z $mb_var ]]; then
        echo ' Could not extract menuBarEnabled variable name'
        echo '##############################################################'
        return
    fi
    echo " Found menuBarEnabled variable: $mb_var"

    # Change !!var to var!==false so undefined defaults to true
    if ! grep -qP ",\s*!!${mb_var}\s*\)" "$bundle_js"; then
        echo ' menuBarEnabled pattern not found or already patched'
    else
        sed -i -E \
            "s/,\s*!!${mb_var}\s*\)/,${mb_var}!==false)/g" \
            "$bundle_js"
        echo ' Patched menuBarEnabled to default to true'
    fi
    echo '##############################################################'
}
|
||||
257
scripts/setup/dependencies.sh
Normal file
257
scripts/setup/dependencies.sh
Normal file
@@ -0,0 +1,257 @@
|
||||
#===============================================================================
|
||||
# Dependency installation and work-directory/Node/Electron bootstrap.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# build_format, distro_family, work_dir, app_staging_dir, project_root,
|
||||
# architecture
|
||||
# Modifies globals:
|
||||
# chosen_electron_module_path, asar_exec (via setup_electron_asar);
|
||||
# PATH is exported (via setup_nodejs)
|
||||
#===============================================================================
|
||||
|
||||
# Verify that the external tools the build needs are installed, and install
# any that are missing via the host's package manager (apt or dnf).
#
# Globals read: build_format (adds dpkg-deb/rpmbuild to the tool list),
#   distro_family (selects the command->package mapping and installer).
# Side effects: may run `sudo apt`/`sudo dnf` (or without sudo when root);
#   exits non-zero on any unrecoverable installation failure.
check_dependencies() {
    echo 'Checking dependencies...'
    local deps_to_install=''
    local common_deps='p7zip wget wrestool icotool convert'
    local all_deps="$common_deps"

    # Add format-specific dependencies
    case "$build_format" in
        deb) all_deps="$all_deps dpkg-deb" ;;
        rpm) all_deps="$all_deps rpmbuild" ;;
    esac

    # Command-to-package mappings per distro family.
    # (`declare` inside a function is function-local in bash.)
    declare -A debian_pkgs=(
        [p7zip]='p7zip-full' [wget]='wget' [wrestool]='icoutils'
        [icotool]='icoutils' [convert]='imagemagick'
        [dpkg-deb]='dpkg-dev' [rpmbuild]='rpm'
    )
    declare -A rpm_pkgs=(
        [p7zip]='p7zip p7zip-plugins' [wget]='wget' [wrestool]='icoutils'
        [icotool]='icoutils' [convert]='ImageMagick'
        [dpkg-deb]='dpkg' [rpmbuild]='rpm-build'
    )

    # Collect the package names for every missing command.
    local cmd
    for cmd in $all_deps; do
        if ! check_command "$cmd"; then
            case "$distro_family" in
                debian)
                    deps_to_install="$deps_to_install ${debian_pkgs[$cmd]}"
                    ;;
                rpm)
                    deps_to_install="$deps_to_install ${rpm_pkgs[$cmd]}"
                    ;;
                *)
                    echo "Warning: Cannot auto-install '$cmd' on unknown distro. Please install manually." >&2
                    ;;
            esac
        fi
    done

    if [[ -n $deps_to_install ]]; then
        echo "System dependencies needed:$deps_to_install"

        # Determine if we need sudo (skip if already root)
        local sudo_cmd='sudo'
        if (( EUID == 0 )); then
            sudo_cmd=''
            echo 'Installing as root (no sudo needed)...'
        else
            echo 'Attempting to install using sudo...'
            # Check if we can sudo without a password first
            if sudo -n true 2>/dev/null; then
                echo 'Passwordless sudo detected.'
            elif ! sudo -v; then
                echo 'Failed to validate sudo credentials. Please ensure you can run sudo.' >&2
                exit 1
            fi
        fi

        case "$distro_family" in
            debian)
                if ! $sudo_cmd apt update; then
                    echo "Failed to run 'apt update'." >&2
                    exit 1
                fi
                # shellcheck disable=SC2086
                if ! $sudo_cmd apt install -y $deps_to_install; then
                    echo "Failed to install dependencies using 'apt install'." >&2
                    exit 1
                fi
                ;;
            rpm)
                # shellcheck disable=SC2086
                if ! $sudo_cmd dnf install -y $deps_to_install; then
                    echo "Failed to install dependencies using 'dnf install'." >&2
                    exit 1
                fi
                ;;
            *)
                echo "Cannot auto-install dependencies on unknown distro." >&2
                echo "Please install these packages manually: $deps_to_install" >&2
                exit 1
                ;;
        esac
        echo 'System dependencies installed successfully.'
    fi
}
|
||||
|
||||
# Recreate the build work tree from scratch.
#
# Globals read: work_dir, app_staging_dir
# Side effects: deletes $work_dir recursively, then recreates it and
#   $app_staging_dir; exits non-zero if either mkdir fails.
setup_work_directory() {
    # ${work_dir:?} aborts with a message instead of expanding to "" —
    # guards the recursive delete against an unset/empty global, and `--`
    # guards against a value that begins with a dash.
    rm -rf -- "${work_dir:?work_dir must be set}"
    mkdir -p -- "$work_dir" || exit 1
    mkdir -p -- "$app_staging_dir" || exit 1
}
|
||||
|
||||
# Ensure a Node.js >= 20 is on PATH. Uses the system node when new enough;
# otherwise downloads a pinned Node.js 20.18.1 tarball from nodejs.org,
# verifies it against the official SHASUMS256.txt, and installs it under
# $work_dir/node, prepending its bin dir to PATH.
#
# Globals read: architecture, work_dir, project_root
# Side effects: network downloads; exports PATH; changes cwd temporarily
#   (always returns to $project_root); exits non-zero on any failure.
setup_nodejs() {
    section_header 'Node.js Setup'
    echo 'Checking Node.js version...'

    local node_version_ok=false
    if command -v node &> /dev/null; then
        local node_version node_major
        # `node --version` prints e.g. v20.18.1; strip the leading 'v'
        # and take the major component for the comparison.
        node_version=$(node --version | cut -d'v' -f2)
        node_major="${node_version%%.*}"
        echo "System Node.js version: v$node_version"

        if (( node_major >= 20 )); then
            echo "System Node.js version is adequate (v$node_version)"
            node_version_ok=true
        else
            echo "System Node.js version is too old (v$node_version). Need v20+"
        fi
    else
        echo 'Node.js not found in system'
    fi

    if [[ $node_version_ok == true ]]; then
        section_footer 'Node.js Setup'
        return 0
    fi

    # Node.js version inadequate - install locally
    echo 'Installing Node.js v20 locally in build directory...'

    # Map the package architecture to Node.js's dist naming.
    local node_arch
    case "$architecture" in
        amd64) node_arch='x64' ;;
        arm64) node_arch='arm64' ;;
        *)
            echo "Unsupported architecture for Node.js: $architecture" >&2
            exit 1
            ;;
    esac

    local node_version_to_install='20.18.1'
    local node_tarball="node-v${node_version_to_install}-linux-${node_arch}.tar.xz"
    local node_url="https://nodejs.org/dist/v${node_version_to_install}/${node_tarball}"
    local node_install_dir="$work_dir/node"

    echo "Downloading Node.js v${node_version_to_install} for ${node_arch}..."
    cd "$work_dir" || exit 1
    if ! wget -O "$node_tarball" "$node_url"; then
        echo "Failed to download Node.js from $node_url" >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    # Verify against official Node.js checksums
    local shasums_url node_expected_sha256
    shasums_url="https://nodejs.org/dist/v${node_version_to_install}/SHASUMS256.txt"
    # `|| true` tolerates a failed SHASUMS fetch; verify_sha256 is then
    # handed an empty expected hash and decides how to proceed.
    node_expected_sha256=$(
        wget -qO- "$shasums_url" \
            | grep -F "$node_tarball" \
            | awk '{print $1}'
    ) || true

    if ! verify_sha256 "$work_dir/$node_tarball" \
            "$node_expected_sha256" 'Node.js tarball'; then
        cd "$project_root" || exit 1
        exit 1
    fi

    echo 'Extracting Node.js...'
    if ! tar -xf "$node_tarball"; then
        echo 'Failed to extract Node.js tarball' >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    mv "node-v${node_version_to_install}-linux-${node_arch}" "$node_install_dir" || exit 1
    export PATH="$node_install_dir/bin:$PATH"

    # Sanity check: the freshly installed node must now resolve on PATH.
    if command -v node &> /dev/null; then
        echo "Local Node.js installed successfully: $(node --version)"
    else
        echo 'Failed to install local Node.js' >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    rm -f "$node_tarball"
    cd "$project_root" || exit 1
    section_footer 'Node.js Setup'
}
|
||||
|
||||
# Ensure Electron and @electron/asar are installed locally under $work_dir
# (npm install --no-save), then resolve and publish their absolute paths.
#
# Globals read: work_dir, project_root
# Globals written: chosen_electron_module_path (realpath of the local
#   electron module), asar_exec (realpath of node_modules/.bin/asar)
# Side effects: may create a throwaway package.json in $work_dir; runs
#   npm (network); changes cwd temporarily (returns to $project_root);
#   exits non-zero on any failure.
setup_electron_asar() {
    section_header 'Electron & Asar Handling'

    echo "Ensuring local Electron and Asar installation in $work_dir..."
    cd "$work_dir" || exit 1

    # npm needs a package.json to anchor the install directory.
    if [[ ! -f package.json ]]; then
        echo "Creating temporary package.json in $work_dir for local install..."
        echo '{"name":"claude-desktop-build","version":"0.0.1","private":true}' > package.json
    fi

    local electron_dist_path="$work_dir/node_modules/electron/dist"
    local asar_bin_path="$work_dir/node_modules/.bin/asar"
    local install_needed=false

    # Install if either artifact is missing (cached builds skip this).
    [[ ! -d $electron_dist_path ]] && echo 'Electron distribution not found.' && install_needed=true
    [[ ! -f $asar_bin_path ]] && echo 'Asar binary not found.' && install_needed=true

    if [[ $install_needed == true ]]; then
        echo "Installing Electron and Asar locally into $work_dir..."
        if ! npm install --no-save electron @electron/asar; then
            echo 'Failed to install Electron and/or Asar locally.' >&2
            cd "$project_root" || exit 1
            exit 1
        fi
        echo 'Electron and Asar installation command finished.'
    else
        echo 'Local Electron distribution and Asar binary already present.'
    fi

    # Re-check after the install attempt and record absolute paths.
    if [[ -d $electron_dist_path ]]; then
        echo "Found Electron distribution directory at $electron_dist_path."
        chosen_electron_module_path="$(realpath "$work_dir/node_modules/electron")"
        echo "Setting Electron module path for copying to $chosen_electron_module_path."
    else
        echo "Failed to find Electron distribution directory at '$electron_dist_path' after installation attempt." >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    if [[ -f $asar_bin_path ]]; then
        asar_exec="$(realpath "$asar_bin_path")"
        echo "Found local Asar binary at $asar_exec."
    else
        echo "Failed to find Asar binary at '$asar_bin_path' after installation attempt." >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    cd "$project_root" || exit 1

    # Final belt-and-braces validation of the published global.
    if [[ -z $chosen_electron_module_path || ! -d $chosen_electron_module_path ]]; then
        echo 'Critical error: Could not resolve a valid Electron module path to copy.' >&2
        exit 1
    fi

    echo "Using Electron module path: $chosen_electron_module_path"
    echo "Using asar executable: $asar_exec"
    section_footer 'Electron & Asar Handling'
}
|
||||
223
scripts/setup/detect-host.sh
Normal file
223
scripts/setup/detect-host.sh
Normal file
@@ -0,0 +1,223 @@
|
||||
#===============================================================================
|
||||
# Host detection and argument parsing: architecture, distro, requirements,
|
||||
# CLI flag processing.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals: (none read on entry)
|
||||
# Modifies globals:
|
||||
# architecture, claude_download_url, claude_exe_sha256, claude_exe_filename,
|
||||
# distro_family, original_user, original_home, project_root, work_dir,
|
||||
# app_staging_dir, build_format, cleanup_action, perform_cleanup,
|
||||
# test_flags_mode, local_exe_path, release_tag, source_dir, node_pty_dir
|
||||
#===============================================================================
|
||||
|
||||
# Detect the machine architecture and configure the matching pinned Claude
# Desktop installer download (URL + expected SHA256 + local filename).
# Only x86_64 and aarch64 are supported.
#
# Globals written: claude_download_url, claude_exe_sha256, architecture,
#   claude_exe_filename
# Side effects: exits non-zero on unsupported architecture.
detect_architecture() {
    section_header 'Architecture Detection'
    echo 'Detecting system architecture...'

    local raw_arch
    raw_arch=$(uname -m) || {
        echo 'Failed to detect architecture' >&2
        exit 1
    }
    echo "Detected machine architecture: $raw_arch"

    # URL and checksum are pinned to a specific upstream release; both
    # must be updated together when bumping the Claude version.
    case "$raw_arch" in
        x86_64)
            claude_download_url='https://downloads.claude.ai/releases/win32/x64/1.3109.0/Claude-35cbf6530e05912137624cde0f075dc7f121fa60.exe'
            claude_exe_sha256='616a7a1c6235709650b0dabe3a06d32f9ade08340891713bd647dff47065f230'
            architecture='amd64'
            claude_exe_filename='Claude-Setup-x64.exe'
            echo 'Configured for amd64 (x86_64) build.'
            ;;
        aarch64)
            claude_download_url='https://downloads.claude.ai/releases/win32/arm64/1.3109.0/Claude-35cbf6530e05912137624cde0f075dc7f121fa60.exe'
            claude_exe_sha256='43fc00b2b94ebf412cae20f15db9fa780a0a3a14e90c60c4549b58f748c3d08d'
            architecture='arm64'
            claude_exe_filename='Claude-Setup-arm64.exe'
            echo 'Configured for arm64 (aarch64) build.'
            ;;
        *)
            echo "Unsupported architecture: $raw_arch. This script supports x86_64 (amd64) and aarch64 (arm64)." >&2
            exit 1
            ;;
    esac

    echo "Target Architecture: $architecture"
    section_footer 'Architecture Detection'
}
|
||||
|
||||
# Classify the host into a package-family bucket so later stages know which
# native packaging tools apply. Probes well-known marker files in priority
# order: Debian, then Fedora/RHEL, then NixOS; anything else is 'unknown'.
#
# Globals written: distro_family ('debian' | 'rpm' | 'nix' | 'unknown')
detect_distro() {
    section_header 'Distribution Detection'
    echo 'Detecting Linux distribution family...'

    # Default first; each branch below only overrides it on a match.
    distro_family='unknown'
    if [[ -f /etc/debian_version ]]; then
        distro_family='debian'
        echo "Detected Debian-based distribution"
        echo " Debian version: $(cat /etc/debian_version)"
    elif [[ -f /etc/fedora-release ]]; then
        distro_family='rpm'
        echo "Detected Fedora"
        echo " $(cat /etc/fedora-release)"
    elif [[ -f /etc/redhat-release ]]; then
        distro_family='rpm'
        echo "Detected Red Hat-based distribution"
        echo " $(cat /etc/redhat-release)"
    elif [[ -f /etc/NIXOS ]]; then
        distro_family='nix'
        echo "Detected NixOS"
    else
        echo "Warning: Could not detect distribution family"
        echo " AppImage build will still work, but native packages (deb/rpm) may not"
    fi

    # Human-readable name from os-release; blank when the field is absent.
    local pretty_name
    pretty_name=$(grep 'PRETTY_NAME' /etc/os-release 2>/dev/null | cut -d'"' -f2 || echo 'Unknown')
    echo "Distribution: $pretty_name"
    echo "Distribution family: $distro_family"
    section_footer 'Distribution Detection'
}
|
||||
|
||||
# Validate the execution environment: refuse to run as root outside CI or
# a container, record the invoking user's home directory, and — when an
# NVM installation exists — source it and prepend its newest Node bin dir
# to PATH so later version checks can see NVM-managed Node.
#
# Globals read: distro_family, architecture (for the summary printout)
# Globals written: original_user, original_home; may export NVM_DIR, PATH
check_system_requirements() {
    # Allow running as root in CI/container environments
    if (( EUID == 0 )); then
        if [[ -n ${CI:-} || -n ${GITHUB_ACTIONS:-} || -f /.dockerenv ]]; then
            echo 'Running as root in CI/container environment (allowed)'
        else
            echo 'This script should not be run using sudo or as the root user.' >&2
            echo 'It will use sudo when needed for specific actions (may prompt for password).' >&2
            echo 'Please run as a normal user.' >&2
            exit 1
        fi
    fi

    original_user=$(whoami)
    # getent resolves the home dir even when $HOME is unreliable (sudo).
    original_home=$(getent passwd "$original_user" | cut -d: -f6)
    if [[ -z $original_home ]]; then
        echo "Could not determine home directory for user $original_user." >&2
        exit 1
    fi
    echo "Running as user: $original_user (Home: $original_home)"

    # Check for NVM and source it if found
    if [[ -d $original_home/.nvm ]]; then
        echo "Found NVM installation for user $original_user, checking for Node.js 20+..."
        export NVM_DIR="$original_home/.nvm"
        if [[ -s $NVM_DIR/nvm.sh ]]; then
            # shellcheck disable=SC1091
            \. "$NVM_DIR/nvm.sh"
            local node_bin_path=''
            # Prefer the currently-selected NVM node; fall back to the
            # highest installed version's bin dir.
            # NOTE(review): 2>/dev/null here applies to xargs only, not to
            # `nvm which` — an `nvm which` error message may still print.
            node_bin_path=$(nvm which current | xargs dirname 2>/dev/null || \
                find "$NVM_DIR/versions/node" -maxdepth 2 -type d -name 'bin' | sort -V | tail -n 1)

            if [[ -n $node_bin_path && -d $node_bin_path ]]; then
                echo "Adding NVM Node bin path to PATH: $node_bin_path"
                export PATH="$node_bin_path:$PATH"
            else
                echo 'Warning: Could not determine NVM Node bin path.'
            fi
        else
            echo 'Warning: nvm.sh script not found or not sourceable.'
        fi
    fi

    echo 'System Information:'
    echo "Distribution: $(grep 'PRETTY_NAME' /etc/os-release 2>/dev/null | cut -d'"' -f2 || echo 'Unknown')"
    echo "Distribution family: $distro_family"
    echo "Target Architecture: $architecture"
}
|
||||
|
||||
# Parse CLI flags, derive path globals, and validate the combination.
# Value-taking flags share one arm (missing-argument check) and are then
# dispatched to the matching global in an inner case.
#
# Globals read: distro_family (chooses the default build_format)
# Globals written: project_root, work_dir, app_staging_dir, build_format,
#   cleanup_action, local_exe_path, release_tag, source_dir, node_pty_dir,
#   test_flags_mode, perform_cleanup
# Side effects: exits 0 after --help; exits non-zero on invalid input.
parse_arguments() {
    section_header 'Argument Parsing'

    project_root="$(pwd)"
    work_dir="$project_root/build"
    app_staging_dir="$work_dir/electron-app"

    # Set default build format based on detected distro
    case "$distro_family" in
        debian) build_format='deb' ;;
        rpm) build_format='rpm' ;;
        nix) build_format='nix' ;;
        *) build_format='appimage' ;;
    esac

    while (( $# > 0 )); do
        case "$1" in
            -b|--build|-c|--clean|-e|--exe|-r|--release-tag|-s|--source-dir|--node-pty-dir)
                # A following token that is absent or looks like another
                # flag means the required value is missing.
                if [[ -z ${2:-} || $2 == -* ]]; then
                    echo "Error: Argument for $1 is missing" >&2
                    exit 1
                fi
                case "$1" in
                    -b|--build) build_format="$2" ;;
                    -c|--clean) cleanup_action="$2" ;;
                    -e|--exe) local_exe_path="$2" ;;
                    -r|--release-tag) release_tag="$2" ;;
                    -s|--source-dir) source_dir="$2" ;;
                    --node-pty-dir) node_pty_dir="$2" ;;
                esac
                shift 2
                ;;
            --test-flags)
                test_flags_mode=true
                shift
                ;;
            -h|--help)
                echo "Usage: $0 [--build deb|rpm|appimage|nix] [--clean yes|no] [--exe /path/to/installer.exe] [--source-dir /path] [--release-tag TAG] [--test-flags]"
                echo ' --build: Specify the build format (deb, rpm, appimage, or nix).'
                echo " Default: auto-detected based on distro (current: $build_format)"
                echo ' --clean: Specify whether to clean intermediate build files (yes or no). Default: yes'
                echo ' --exe: Use a local Claude installer exe instead of downloading'
                echo ' --source-dir: Path to repo root for scripts/ and assets (default: project root)'
                echo ' --node-pty-dir: Path to pre-built node-pty package (skips npm install)'
                echo ' --release-tag: Release tag (e.g., v1.3.2+claude1.1.799) to append wrapper version to package'
                echo ' --test-flags: Parse flags, print results, and exit without building.'
                exit 0
                ;;
            *)
                echo "Unknown option: $1" >&2
                echo 'Use -h or --help for usage information.' >&2
                exit 1
                ;;
        esac
    done

    # source_dir is where scripts/assets live (default: project_root)
    source_dir="${source_dir:-$project_root}"

    # Validate arguments (normalize to lowercase first)
    build_format="${build_format,,}"
    cleanup_action="${cleanup_action,,}"

    if [[ ! -d $source_dir ]]; then
        echo "Error: --source-dir path does not exist: $source_dir" >&2
        exit 1
    fi
    if [[ -n $node_pty_dir && ! -d $node_pty_dir ]]; then
        echo "Error: --node-pty-dir path does not exist: $node_pty_dir" >&2
        exit 1
    fi

    if [[ $build_format != 'deb' && $build_format != 'rpm' && $build_format != 'appimage' && $build_format != 'nix' ]]; then
        echo "Invalid build format specified: '$build_format'. Must be 'deb', 'rpm', 'appimage', or 'nix'." >&2
        exit 1
    fi

    # Warn if building native package for wrong distro
    if [[ $build_format == 'deb' && $distro_family != 'debian' ]]; then
        echo "Warning: Building .deb package on non-Debian system ($distro_family). This may fail." >&2
    elif [[ $build_format == 'rpm' && $distro_family != 'rpm' ]]; then
        echo "Warning: Building .rpm package on non-RPM system ($distro_family). This may fail." >&2
    fi
    if [[ $cleanup_action != 'yes' && $cleanup_action != 'no' ]]; then
        echo "Invalid cleanup option specified: '$cleanup_action'. Must be 'yes' or 'no'." >&2
        exit 1
    fi

    echo "Selected build format: $build_format"
    echo "Cleanup intermediate files: $cleanup_action"

    [[ $cleanup_action == 'yes' ]] && perform_cleanup=true

    section_footer 'Argument Parsing'
}
|
||||
89
scripts/setup/download.sh
Normal file
89
scripts/setup/download.sh
Normal file
@@ -0,0 +1,89 @@
|
||||
#===============================================================================
|
||||
# Claude installer download and extraction into work_dir/claude-extract.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# work_dir, claude_exe_filename, local_exe_path, architecture,
|
||||
# claude_download_url, claude_exe_sha256, project_root, release_tag
|
||||
# Modifies globals:
|
||||
# claude_extract_dir, version
|
||||
#===============================================================================
|
||||
|
||||
# Obtain the Windows Claude Desktop installer (local copy or download with
# SHA256 verification), extract it with 7z, locate the embedded
# AnthropicClaude nupkg, parse the Claude version from its filename, and
# extract the nupkg contents into $work_dir/claude-extract.
#
# Globals read: work_dir, claude_exe_filename, local_exe_path,
#   architecture, claude_download_url, claude_exe_sha256, project_root,
#   release_tag
# Globals written: claude_extract_dir, version (may gain a
#   "-<wrapper_version>" suffix derived from release_tag)
# Side effects: network download (unless --exe), 7z extraction, temporary
#   cwd changes (returns to $project_root); exits non-zero on failure.
download_claude_installer() {
    section_header 'Download the latest Claude executable'

    local claude_exe_path="$work_dir/$claude_exe_filename"

    if [[ -n $local_exe_path ]]; then
        echo "Using local Claude installer: $local_exe_path"
        if [[ ! -f $local_exe_path ]]; then
            echo "Local installer file not found: $local_exe_path" >&2
            exit 1
        fi
        cp "$local_exe_path" "$claude_exe_path" || exit 1
        echo 'Local installer copied to build directory'
    else
        echo "Downloading Claude Desktop installer for $architecture..."
        if ! wget -O "$claude_exe_path" "$claude_download_url"; then
            echo "Failed to download Claude Desktop installer from $claude_download_url" >&2
            exit 1
        fi
        echo "Download complete: $claude_exe_filename"

        # Note: checksum is only verified for downloads, not for --exe
        # supplied files (the user vouches for those).
        if ! verify_sha256 "$claude_exe_path" \
                "$claude_exe_sha256" 'Claude Desktop installer'; then
            exit 1
        fi
    fi

    echo "Extracting resources from $claude_exe_filename into separate directory..."
    claude_extract_dir="$work_dir/claude-extract"
    mkdir -p "$claude_extract_dir" || exit 1

    # The installer is a self-extracting archive; 7z unpacks it directly.
    if ! 7z x -y "$claude_exe_path" -o"$claude_extract_dir"; then
        echo 'Failed to extract installer' >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    cd "$claude_extract_dir" || exit 1
    local nupkg_path_relative
    nupkg_path_relative=$(find . -maxdepth 1 -name 'AnthropicClaude-*.nupkg' | head -1)

    if [[ -z $nupkg_path_relative ]]; then
        echo "Could not find AnthropicClaude nupkg file in $claude_extract_dir" >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo "Found nupkg: $nupkg_path_relative (in $claude_extract_dir)"

    # Filename looks like AnthropicClaude-X.Y.Z-full.nupkg (or
    # -arm64-full); pull out the X.Y.Z portion. LC_ALL=C keeps grep's
    # byte semantics locale-independent.
    version=$(echo "$nupkg_path_relative" | LC_ALL=C grep -oP 'AnthropicClaude-\K[0-9]+\.[0-9]+\.[0-9]+(?=-full|-arm64-full)')
    if [[ -z $version ]]; then
        echo "Could not extract version from nupkg filename: $nupkg_path_relative" >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo "Detected Claude version: $version"

    # Extract wrapper version from release tag if provided (e.g., v1.3.2+claude1.1.799 -> 1.3.2)
    if [[ -n $release_tag ]]; then
        local wrapper_version
        # Extract version between 'v' and '+claude' (e.g., v1.3.2+claude1.1.799 -> 1.3.2)
        wrapper_version=$(echo "$release_tag" | LC_ALL=C grep -oP '^v\K[0-9]+\.[0-9]+\.[0-9]+(?=\+claude)')
        if [[ -n $wrapper_version ]]; then
            version="${version}-${wrapper_version}"
            echo "Package version with wrapper suffix: $version"
        else
            echo "Warning: Could not extract wrapper version from release tag: $release_tag" >&2
        fi
    fi

    # The nupkg is itself a zip archive; extract it in place.
    if ! 7z x -y "$nupkg_path_relative"; then
        echo 'Failed to extract nupkg' >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    echo 'Resources extracted from nupkg'

    cd "$project_root" || exit 1
}
|
||||
45
scripts/staging/cowork-resources.sh
Normal file
45
scripts/staging/cowork-resources.sh
Normal file
@@ -0,0 +1,45 @@
|
||||
#===============================================================================
|
||||
# Cowork runtime resources: plugin shim script and architecture-specific
|
||||
# smol-bin VHDX for KVM guest SDK access.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# claude_extract_dir, electron_resources_dest, architecture
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
# Stage the Cowork runtime assets shipped inside the installer into the
# Electron resources directory: the MCP plugin sandbox shim and the
# architecture-specific smol-bin VHDX for the KVM guest. Both are
# optional — a missing file only produces a warning.
#
# Globals read: claude_extract_dir, electron_resources_dest, architecture
copy_cowork_resources() {
    section_header 'Cowork Resources'

    local src_root="$claude_extract_dir/lib/net45/resources"

    # Plugin shim script (used by the app for MCP plugin sandboxing);
    # must be executable once staged.
    local shim_path="$src_root/cowork-plugin-shim.sh"
    if [[ -f $shim_path ]]; then
        cp "$shim_path" "$electron_resources_dest/cowork-plugin-shim.sh"
        chmod +x "$electron_resources_dest/cowork-plugin-shim.sh"
        echo "Copied cowork-plugin-shim.sh"
    else
        echo "Warning: cowork-plugin-shim.sh not found at $shim_path"
    fi

    # smol-bin VHDX (SDK binaries for the KVM guest VM). The app copies
    # it from resources to its bundle dir at startup and looks for
    # smol-bin.{x64|arm64}.vhdx.
    local vm_arch='x64'
    [[ $architecture == 'arm64' ]] && vm_arch='arm64'
    local vhdx_path="$src_root/smol-bin.${vm_arch}.vhdx"
    if [[ -f $vhdx_path ]]; then
        cp "$vhdx_path" "$electron_resources_dest/smol-bin.${vm_arch}.vhdx"
        echo "Copied smol-bin.${vm_arch}.vhdx"
    else
        echo "Warning: smol-bin VHDX not found at $vhdx_path"
        echo "KVM Cowork will rely on virtiofs for SDK access"
    fi

    section_footer 'Cowork Resources'
}
|
||||
79
scripts/staging/electron.sh
Normal file
79
scripts/staging/electron.sh
Normal file
@@ -0,0 +1,79 @@
|
||||
#===============================================================================
|
||||
# Electron staging: finalize app.asar (pack with --unpack for native modules,
|
||||
# copy native stubs and cowork daemon) and copy the Electron module tree into
|
||||
# the staging directory with correct permissions.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# asar_exec, app_staging_dir, source_dir, node_pty_dir, node_pty_build_dir,
|
||||
# chosen_electron_module_path
|
||||
# Modifies globals: electron_resources_dest
|
||||
#===============================================================================
|
||||
|
||||
# Repack the patched app.asar.contents/ tree into app.asar and populate
# app.asar.unpacked/ with the pieces that must live outside the archive:
# the claude-native stub, the cowork VM service daemon, and the node-pty
# native binaries.
#
# Globals read: asar_exec, app_staging_dir, source_dir, node_pty_dir,
#   node_pty_build_dir
# NOTE(review): the asar pack step uses relative paths — the caller is
# expected to have cd'd into the directory containing app.asar.contents.
finalize_app_asar() {
    # Pack with --unpack so native modules (.node) are extracted
    # into app.asar.unpacked/ AND tracked in the asar manifest as
    # unpacked. Electron's asar->.unpacked redirect requires the
    # manifest entry to exist; otherwise loaders that require()
    # files from inside the asar get MODULE_NOT_FOUND.
    "$asar_exec" pack app.asar.contents app.asar \
        --unpack '**/*.node' || exit 1

    # Stub implementation replacing @ant/claude-native (see
    # scripts/claude-native-stub.js in the repo).
    mkdir -p "$app_staging_dir/app.asar.unpacked/node_modules/@ant/claude-native" || exit 1
    cp "$source_dir/scripts/claude-native-stub.js" \
        "$app_staging_dir/app.asar.unpacked/node_modules/@ant/claude-native/index.js" || exit 1

    # Copy cowork VM service daemon (must be unpacked for child_process.fork)
    echo 'Copying cowork VM service daemon to unpacked directory...'
    cp "$source_dir/scripts/cowork-vm-service.js" \
        "$app_staging_dir/app.asar.unpacked/cowork-vm-service.js" || exit 1
    echo 'Cowork VM service daemon copied to unpacked'

    # Copy node-pty native binaries. Preference order: an explicitly
    # supplied pre-built dir (--node-pty-dir), then a build tree
    # referenced by node_pty_build_dir.
    local pty_release_dir=''
    if [[ -n $node_pty_dir && -d $node_pty_dir/build/Release ]]; then
        pty_release_dir="$node_pty_dir/build/Release"
    elif [[ -n $node_pty_build_dir && -d $node_pty_build_dir/node_modules/node-pty/build/Release ]]; then
        pty_release_dir="$node_pty_build_dir/node_modules/node-pty/build/Release"
    fi

    if [[ -n $pty_release_dir ]]; then
        echo 'Copying node-pty native binaries to unpacked directory...'
        mkdir -p "$app_staging_dir/app.asar.unpacked/node_modules/node-pty/build/Release" || exit 1
        # --no-preserve=mode is a GNU cp extension (not portable to BSD).
        cp -r --no-preserve=mode "$pty_release_dir/"* \
            "$app_staging_dir/app.asar.unpacked/node_modules/node-pty/build/Release/" || exit 1
        chmod +x "$app_staging_dir/app.asar.unpacked/node_modules/node-pty/build/Release/"* 2>/dev/null || true
        echo 'node-pty native binaries copied'
    else
        # Non-fatal: the build proceeds without terminal support.
        echo 'node-pty native binaries not found - terminal features may not work'
    fi
}
|
||||
|
||||
# Copy the resolved Electron module into the staging tree, make the staged
# Electron binary executable, and mirror Electron's dist/resources (locale
# data) into the staged module, recording that path for later stages.
#
# Globals read: chosen_electron_module_path, app_staging_dir
# Globals written: electron_resources_dest
stage_electron() {
    echo 'Copying chosen electron installation to staging area...'
    mkdir -p "$app_staging_dir/node_modules/" || exit 1

    local module_name
    module_name=$(basename "$chosen_electron_module_path")
    echo "Copying from $chosen_electron_module_path to $app_staging_dir/node_modules/"
    # -a preserves symlinks and permissions of the module tree.
    cp -a "$chosen_electron_module_path" "$app_staging_dir/node_modules/" || exit 1

    local bin_path="$app_staging_dir/node_modules/$module_name/dist/electron"
    if [[ ! -f $bin_path ]]; then
        echo "Warning: Staged Electron binary not found at expected path: $bin_path"
    else
        echo "Setting executable permission on staged Electron binary: $bin_path"
        chmod +x "$bin_path" || exit 1
    fi

    # Copy Electron locale files and publish the destination path.
    local res_src="$chosen_electron_module_path/dist/resources"
    electron_resources_dest="$app_staging_dir/node_modules/$module_name/dist/resources"
    if [[ ! -d $res_src ]]; then
        echo "Warning: Electron resources directory not found at $res_src"
    else
        echo 'Copying Electron locale resources...'
        mkdir -p "$electron_resources_dest" || exit 1
        cp -a "$res_src"/* "$electron_resources_dest/" || exit 1
        echo 'Electron locale resources copied'
    fi
}
|
||||
78
scripts/staging/icons.sh
Normal file
78
scripts/staging/icons.sh
Normal file
@@ -0,0 +1,78 @@
|
||||
#===============================================================================
|
||||
# Icon processing: extract exe icons with wrestool/icotool, convert tray
|
||||
# icons to 100% opaque PNG so they render on Linux panels.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# claude_extract_dir, project_root, work_dir, electron_resources_dest
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
# Extract the application icons from the Windows claude.exe (wrestool +
# icotool) into $work_dir, then stage the tray icon PNGs and force their
# alpha channel fully opaque with ImageMagick so macOS-style template
# icons remain visible on Linux panels.
#
# Globals read: claude_extract_dir, project_root, work_dir,
#   electron_resources_dest
# Side effects: temporary cwd changes (returns to $project_root); edits
#   the staged TrayIconTemplate*.png files in place. Icon extraction
#   failures are fatal; tray-icon processing failures are warnings only.
process_icons() {
    section_header 'Icon Processing'

    cd "$claude_extract_dir" || exit 1
    local exe_path='lib/net45/claude.exe'
    if [[ ! -f $exe_path ]]; then
        echo "Cannot find claude.exe at expected path: $claude_extract_dir/$exe_path" >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    echo "Extracting application icons from $exe_path..."
    # wrestool -t 14: PE resource type 14 is RT_GROUP_ICON.
    if ! wrestool -x -t 14 "$exe_path" -o claude.ico; then
        echo 'Failed to extract icons from exe' >&2
        cd "$project_root" || exit 1
        exit 1
    fi

    # icotool -x splits the .ico into individual claude_*.png files.
    if ! icotool -x claude.ico; then
        echo 'Failed to convert icons' >&2
        cd "$project_root" || exit 1
        exit 1
    fi
    cp claude_*.png "$work_dir/" || exit 1
    echo "Application icons extracted and copied to $work_dir"

    cd "$project_root" || exit 1

    # Process tray icons
    local claude_locale_src="$claude_extract_dir/lib/net45/resources"
    echo 'Copying and processing tray icon files for Linux...'
    if [[ ! -d $claude_locale_src ]]; then
        echo "Warning: Claude resources directory not found at $claude_locale_src"
        section_footer 'Icon Processing'
        return
    fi

    cp "$claude_locale_src/Tray"* "$electron_resources_dest/" 2>/dev/null || \
        echo 'Warning: No tray icon files found'

    # Find ImageMagick command: v7 'magick' preferred, v6 'convert' fallback.
    local magick_cmd=''
    command -v magick &> /dev/null && magick_cmd='magick'
    [[ -z $magick_cmd ]] && command -v convert &> /dev/null && magick_cmd='convert'

    if [[ -z $magick_cmd ]]; then
        echo 'Warning: ImageMagick not found - tray icons may appear invisible'
        echo 'Tray icon files copied (unprocessed)'
        section_footer 'Icon Processing'
        return
    fi

    echo "Processing tray icons for Linux visibility (using $magick_cmd)..."
    local icon_file icon_name
    for icon_file in "$electron_resources_dest"/TrayIconTemplate*.png; do
        # Guard against an unmatched glob expanding to the literal pattern.
        [[ ! -f $icon_file ]] && continue
        icon_name=$(basename "$icon_file")
        # -fx 'a>0?1:0': make every non-transparent pixel fully opaque;
        # PNG32: forces RGBA output so the alpha channel is kept.
        if "$magick_cmd" "$icon_file" -channel A -fx 'a>0?1:0' +channel \
                "PNG32:$icon_file" 2>/dev/null; then
            echo " Processed $icon_name (100% opaque)"
        else
            echo " Failed to process $icon_name"
        fi
    done
    echo 'Tray icon files copied and processed'

    section_footer 'Icon Processing'
}
|
||||
21
scripts/staging/locales.sh
Normal file
21
scripts/staging/locales.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#===============================================================================
|
||||
# Locale file staging: copy Claude i18n JSON into Electron's resources dir.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# claude_extract_dir, electron_resources_dest, app_staging_dir
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
copy_locale_files() {
  # Stage Claude's i18n JSON files (e.g. en-US.json) into the Electron
  # resources directory, then emit the asar-staging completion message.
  # Reads globals: claude_extract_dir, electron_resources_dest, app_staging_dir.
  local locale_dir
  locale_dir="$claude_extract_dir/lib/net45/resources"

  echo 'Copying Claude locale JSON files to Electron resources directory...'
  if [[ ! -d $locale_dir ]]; then
    # Missing resources tree is non-fatal: warn and carry on.
    echo "Warning: Claude locale source directory not found at $locale_dir"
  else
    # Locale files follow the lang-REGION.json naming pattern.
    cp "$locale_dir/"*-*.json "$electron_resources_dest/" || exit 1
    echo 'Claude locale JSON files copied to Electron resources directory'
  fi

  echo "app.asar processed and staged in $app_staging_dir"
}
|
||||
34
scripts/staging/ssh-helpers.sh
Normal file
34
scripts/staging/ssh-helpers.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
#===============================================================================
|
||||
# SSH helper staging: copy architecture-specific claude-ssh binary into the
|
||||
# Electron resources directory.
|
||||
#
|
||||
# Sourced by: build.sh
|
||||
# Sourced globals:
|
||||
# claude_extract_dir, electron_resources_dest, architecture
|
||||
# Modifies globals: (none)
|
||||
#===============================================================================
|
||||
|
||||
copy_ssh_helpers() {
  # Stage the architecture-specific claude-ssh helper binary and its
  # version.txt into the Electron resources directory.
  # Reads globals: claude_extract_dir, electron_resources_dest, architecture.
  # Calls: section_header, section_footer (defined in _common.sh).
  section_header 'SSH Helpers'

  local ssh_src="$claude_extract_dir/lib/net45/resources/claude-ssh"
  local ssh_dest="$electron_resources_dest/claude-ssh"
  local binary_name="claude-ssh-linux-$architecture"

  # Absent helpers are non-fatal: warn and skip this section.
  if [[ ! -d "$ssh_src" ]]; then
    echo "Warning: SSH helpers not found at $ssh_src"
    section_footer 'SSH Helpers'
    return
  fi

  mkdir -p "$ssh_dest" || exit 1
  cp "$ssh_src/version.txt" "$ssh_dest/" || exit 1
  cp "$ssh_src/$binary_name" "$ssh_dest/" || exit 1
  # Fail the build if the helper cannot be made executable; a non-executable
  # binary would otherwise ship silently (previously this chmod was unchecked,
  # unlike the mkdir/cp steps above).
  chmod +x "$ssh_dest/$binary_name" || exit 1

  echo "Copied SSH helper files:"
  echo " version.txt"
  echo " $binary_name"

  section_footer 'SSH Helpers'
}
|
||||
Reference in New Issue
Block a user