refactor(Webpack): more reliable patching (#2237)

Nuckyz authored on 2024-05-02 18:52:41 -03:00 · committed by GitHub
parent 0a598ae966
commit a055b1d47b
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)

8 changed files with 443 additions and 302 deletions


@@ -269,7 +269,7 @@ page.on("pageerror", e => console.error("[Page Error]", e));
 await page.setBypassCSP(true);
 
-function runTime(token: string) {
+async function runtime(token: string) {
     console.log("[PUP_DEBUG]", "Starting test...");
 
     try {
@@ -282,9 +282,13 @@ function runTime(token: string) {
         // Monkey patch Logger to not log with custom css
         // @ts-ignore
+        const originalLog = Vencord.Util.Logger.prototype._log;
+        // @ts-ignore
         Vencord.Util.Logger.prototype._log = function (level, levelColor, args) {
             if (level === "warn" || level === "error")
-                console[level]("[Vencord]", this.name + ":", ...args);
+                return console[level]("[Vencord]", this.name + ":", ...args);
+
+            return originalLog.call(this, level, levelColor, args);
         };
 
         // Force enable all plugins and patches
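The hunk above uses a common monkey-patching pattern: stash the original prototype method, short-circuit the cases you care about, and delegate everything else. A minimal self-contained sketch of that pattern (the Logger class here is a stand-in, not Vencord's real one):

```ts
// Stand-in Logger; only meant to show the shape of the pattern.
class Logger {
    constructor(public name: string) { }
    _log(level: string, levelColor: string, args: any[]) {
        console.log(`%c ${this.name} `, `background: ${levelColor}`, ...args);
    }
}

// Keep a reference to the original, handle warn/error specially, delegate the rest.
const originalLog = Logger.prototype._log;
Logger.prototype._log = function (this: Logger, level: string, levelColor: string, args: any[]) {
    if (level === "warn" || level === "error")
        return console[level]("[Vencord]", this.name + ":", ...args);

    return originalLog.call(this, level, levelColor, args);
};

new Logger("Demo")._log("warn", "", ["something looks off"]); // routed through console.warn
```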
@@ -310,45 +314,30 @@ function runTime(token: string) {
             });
         });
 
-        Vencord.Webpack.waitFor(
-            "loginToken",
-            m => {
-                console.log("[PUP_DEBUG]", "Logging in with token...");
-                m.loginToken(token);
-            }
-        );
-
-        // Force load all chunks
-        Vencord.Webpack.onceReady.then(() => setTimeout(async () => {
-            console.log("[PUP_DEBUG]", "Webpack is ready!");
-
-            const { wreq } = Vencord.Webpack;
-
-            console.log("[PUP_DEBUG]", "Loading all chunks...");
-
-            let chunks = null as Record<number, string[]> | null;
-            const sym = Symbol("Vencord.chunksExtract");
-
-            Object.defineProperty(Object.prototype, sym, {
-                get() {
-                    chunks = this;
-                },
-                set() { },
-                configurable: true,
-            });
-
-            await (wreq as any).el(sym);
-            delete Object.prototype[sym];
-
-            const validChunksEntryPoints = new Set<string>();
-            const validChunks = new Set<string>();
-            const invalidChunks = new Set<string>();
-
-            if (!chunks) throw new Error("Failed to get chunks");
-
-            for (const entryPoint in chunks) {
-                const chunkIds = chunks[entryPoint];
-                let invalidEntryPoint = false;
+        let wreq: typeof Vencord.Webpack.wreq;
+
+        const { canonicalizeMatch, Logger } = Vencord.Util;
+
+        const validChunks = new Set<string>();
+        const invalidChunks = new Set<string>();
+
+        let chunksSearchingResolve: (value: void | PromiseLike<void>) => void;
+        const chunksSearchingDone = new Promise<void>(r => chunksSearchingResolve = r);
+
+        // True if resolved, false otherwise
+        const chunksSearchPromises = [] as Array<() => boolean>;
+
+        const lazyChunkRegex = canonicalizeMatch(/Promise\.all\((\[\i\.\i\(".+?"\).+?\])\).then\(\i\.bind\(\i,"(.+?)"\)\)/g);
+        const chunkIdsRegex = canonicalizeMatch(/\("(.+?)"\)/g);
+
+        async function searchAndLoadLazyChunks(factoryCode: string) {
+            const lazyChunks = factoryCode.matchAll(lazyChunkRegex);
+            const validChunkGroups = new Set<[chunkIds: string[], entryPoint: string]>();
+
+            await Promise.all(Array.from(lazyChunks).map(async ([, rawChunkIds, entryPoint]) => {
+                const chunkIds = Array.from(rawChunkIds.matchAll(chunkIdsRegex)).map(m => m[1]);
+
+                if (chunkIds.length === 0) return;
+
+                let invalidChunkGroup = false;
 
                 for (const id of chunkIds) {
                     if (wreq.u(id) == null || wreq.u(id) === "undefined.js") continue;
@@ -359,56 +348,28 @@ function runTime(token: string) {
                     if (isWasm) {
                         invalidChunks.add(id);
-                        invalidEntryPoint = true;
+                        invalidChunkGroup = true;
                         continue;
                     }
 
                     validChunks.add(id);
                 }
 
-                if (!invalidEntryPoint)
-                    validChunksEntryPoints.add(entryPoint);
-            }
-
-            for (const entryPoint of validChunksEntryPoints) {
-                try {
-                    // Loads all chunks required for an entry point
-                    await (wreq as any).el(entryPoint);
-                } catch (err) { }
-            }
-
-            // Matches "id" or id:
-            const chunkIdRegex = /(?:"(\d+?)")|(?:(\d+?):)/g;
-            const wreqU = wreq.u.toString();
-
-            const allChunks = [] as string[];
-            let currentMatch: RegExpExecArray | null;
-
-            while ((currentMatch = chunkIdRegex.exec(wreqU)) != null) {
-                const id = currentMatch[1] ?? currentMatch[2];
-                if (id == null) continue;
-
-                allChunks.push(id);
-            }
-
-            if (allChunks.length === 0) throw new Error("Failed to get all chunks");
-
-            const chunksLeft = allChunks.filter(id => {
-                return !(validChunks.has(id) || invalidChunks.has(id));
-            });
-
-            for (const id of chunksLeft) {
-                const isWasm = await fetch(wreq.p + wreq.u(id))
-                    .then(r => r.text())
-                    .then(t => t.includes(".module.wasm") || !t.includes("(this.webpackChunkdiscord_app=this.webpackChunkdiscord_app||[]).push"));
-
-                // Loads a chunk
-                if (!isWasm) await wreq.e(id as any);
-            }
-
-            // Make sure every chunk has finished loading
-            await new Promise(r => setTimeout(r, 1000));
-
-            for (const entryPoint of validChunksEntryPoints) {
+                if (!invalidChunkGroup) {
+                    validChunkGroups.add([chunkIds, entryPoint]);
+                }
+            }));
+
+            // Loads all found valid chunk groups
+            await Promise.all(
+                Array.from(validChunkGroups)
+                    .map(([chunkIds]) =>
+                        Promise.all(chunkIds.map(id => wreq.e(id as any).catch(() => { })))
+                    )
+            );
+
+            // Requires the entry points for all valid chunk groups
+            for (const [, entryPoint] of validChunkGroups) {
                 try {
                     if (wreq.m[entryPoint]) wreq(entryPoint as any);
                 } catch (err) {
@@ -416,11 +377,97 @@ function runTime(token: string) {
                 }
             }
 
+            // setImmediate to only check if all chunks were loaded after this function resolves
+            // We check if all chunks were loaded every time a factory is loaded
+            // If we are still looking for chunks in the other factories, the array will have that factory's chunk search promise not resolved
+            // But, if all chunk search promises are resolved, this means we found every lazy chunk loaded by Discord code and manually loaded them
+            setTimeout(() => {
+                let allResolved = true;
+
+                for (let i = 0; i < chunksSearchPromises.length; i++) {
+                    const isResolved = chunksSearchPromises[i]();
+
+                    if (isResolved) {
+                        // Remove finished promises to avoid having to iterate through a huge array everytime
+                        chunksSearchPromises.splice(i--, 1);
+                    } else {
+                        allResolved = false;
+                    }
+                }
+
+                if (allResolved) chunksSearchingResolve();
+            }, 0);
+        }
+
+        Vencord.Webpack.waitFor(
+            "loginToken",
+            m => {
+                console.log("[PUP_DEBUG]", "Logging in with token...");
+                m.loginToken(token);
+            }
+        );
+
+        Vencord.Webpack.beforeInitListeners.add(async webpackRequire => {
+            console.log("[PUP_DEBUG]", "Loading all chunks...");
+
+            wreq = webpackRequire;
+
+            Vencord.Webpack.factoryListeners.add(factory => {
+                let isResolved = false;
+                searchAndLoadLazyChunks(factory.toString()).then(() => isResolved = true);
+
+                chunksSearchPromises.push(() => isResolved);
+            });
+
+            // setImmediate to only search the initial factories after Discord initialized the app
+            // our beforeInitListeners are called before Discord initializes the app
+            setTimeout(() => {
+                for (const factoryId in wreq.m) {
+                    let isResolved = false;
+                    searchAndLoadLazyChunks(wreq.m[factoryId].toString()).then(() => isResolved = true);
+
+                    chunksSearchPromises.push(() => isResolved);
+                }
+            }, 0);
+        });
+
+        await chunksSearchingDone;
+
+        // All chunks Discord has mapped to asset files, even if they are not used anymore
+        const allChunks = [] as string[];
+
+        // Matches "id" or id:
+        for (const currentMatch of wreq!.u.toString().matchAll(/(?:"(\d+?)")|(?:(\d+?):)/g)) {
+            const id = currentMatch[1] ?? currentMatch[2];
+            if (id == null) continue;
+
+            allChunks.push(id);
+        }
+
+        if (allChunks.length === 0) throw new Error("Failed to get all chunks");
+
+        // Chunks that are not loaded (not used) by Discord code anymore
+        const chunksLeft = allChunks.filter(id => {
+            return !(validChunks.has(id) || invalidChunks.has(id));
+        });
+
+        await Promise.all(chunksLeft.map(async id => {
+            const isWasm = await fetch(wreq.p + wreq.u(id))
+                .then(r => r.text())
+                .then(t => t.includes(".module.wasm") || !t.includes("(this.webpackChunkdiscord_app=this.webpackChunkdiscord_app||[]).push"));
+
+            // Loads and requires a chunk
+            if (!isWasm) {
+                await wreq.e(id as any);
+                if (wreq.m[id]) wreq(id as any);
+            }
+        }));
+
         console.log("[PUP_DEBUG]", "Finished loading all chunks!");
 
         for (const patch of Vencord.Plugins.patches) {
             if (!patch.all) {
-                new Vencord.Util.Logger("WebpackInterceptor").warn(`Patch by ${patch.plugin} found no module (Module id is -): ${patch.find}`);
+                new Logger("WebpackInterceptor").warn(`Patch by ${patch.plugin} found no module (Module id is -): ${patch.find}`);
             }
         }
@@ -445,7 +492,7 @@ function runTime(token: string) {
                 const [code, matcher] = args;
 
                 const module = Vencord.Webpack.findModuleFactory(...code);
-                if (module) result = module.toString().match(Vencord.Util.canonicalizeMatch(matcher));
+                if (module) result = module.toString().match(canonicalizeMatch(matcher));
             } else {
                 // @ts-ignore
                 result = Vencord.Webpack[method](...args);
@@ -463,7 +510,6 @@ function runTime(token: string) {
         }
 
         setTimeout(() => console.log("[PUPPETEER_TEST_DONE_SIGNAL]"), 1000);
-    }, 1000));
     } catch (e) {
         console.log("[PUP_DEBUG]", "A fatal error occurred:", e);
         process.exit(1);
@@ -473,7 +519,7 @@ function runTime(token: string) {
 await page.evaluateOnNewDocument(`
     ${readFileSync("./dist/browser.js", "utf-8")}
-    ;(${runTime.toString()})(${JSON.stringify(process.env.DISCORD_TOKEN)});
+    ;(${runtime.toString()})(${JSON.stringify(process.env.DISCORD_TOKEN)});
`);
 
 await page.goto(CANARY ? "https://canary.discord.com/login" : "https://discord.com/login");
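The rewritten runtime above discovers lazy chunks by regex-matching each factory's source for Discord's Promise.all([n.e("...")]).then(n.bind(n,"...")) shape. A standalone sketch of just that extraction step, with canonicalizeMatch's `\i` identifier shorthand expanded by hand and a made-up factory string:

```ts
// canonicalizeMatch would expand \i to this identifier pattern; inlined here for a standalone example.
const ident = String.raw`[A-Za-z_$][\w$]*`;
const lazyChunkRegex = new RegExp(
    String.raw`Promise\.all\((\[${ident}\.${ident}\(".+?"\).+?\])\)\.then\(${ident}\.bind\(${ident},"(.+?)"\)\)`,
    "g"
);
const chunkIdsRegex = /\("(.+?)"\)/g;

// Made-up factory source shaped like Discord's lazy imports (not real Discord code).
const factoryCode = 'x=>Promise.all([n.e("100"),n.e("200")]).then(n.bind(n,"300"))';

for (const [, rawChunkIds, entryPoint] of factoryCode.matchAll(lazyChunkRegex)) {
    const chunkIds = Array.from(rawChunkIds.matchAll(chunkIdsRegex)).map(m => m[1]);
    console.log(chunkIds, entryPoint); // ["100", "200"] "300"
    // The real runtime then wreq.e()s each chunk id and wreq()s the entry point.
}
```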


@@ -34,6 +34,9 @@ export const PMLogger = logger;
 export const plugins = Plugins;
 export const patches = [] as Patch[];
 
+/** Whether we have subscribed to flux events of all the enabled plugins when FluxDispatcher was ready */
+let enabledPluginsSubscribedFlux = false;
+
 const settings = Settings.plugins;
 
 export function isPluginEnabled(p: string) {
@@ -119,6 +122,33 @@ export function startDependenciesRecursive(p: Plugin) {
     return { restartNeeded, failures };
 }
 
+export function subscribePluginFluxEvents(p: Plugin, fluxDispatcher: typeof FluxDispatcher) {
+    if (p.flux) {
+        logger.debug("Subscribing to flux events of plugin", p.name);
+        for (const [event, handler] of Object.entries(p.flux)) {
+            fluxDispatcher.subscribe(event as FluxEvents, handler);
+        }
+    }
+}
+
+export function unsubscribePluginFluxEvents(p: Plugin, fluxDispatcher: typeof FluxDispatcher) {
+    if (p.flux) {
+        logger.debug("Unsubscribing from flux events of plugin", p.name);
+        for (const [event, handler] of Object.entries(p.flux)) {
+            fluxDispatcher.unsubscribe(event as FluxEvents, handler);
+        }
+    }
+}
+
+export function subscribeAllPluginsFluxEvents(fluxDispatcher: typeof FluxDispatcher) {
+    enabledPluginsSubscribedFlux = true;
+
+    for (const name in Plugins) {
+        if (!isPluginEnabled(name)) continue;
+        subscribePluginFluxEvents(Plugins[name], fluxDispatcher);
+    }
+}
+
 export const startPlugin = traceFunction("startPlugin", function startPlugin(p: Plugin) {
     const { name, commands, flux, contextMenus } = p;
@@ -138,7 +168,7 @@ export const startPlugin = traceFunction("startPlugin", function startPlugin(p:
     }
 
     if (commands?.length) {
-        logger.info("Registering commands of plugin", name);
+        logger.debug("Registering commands of plugin", name);
         for (const cmd of commands) {
             try {
                 registerCommand(cmd, name);
@@ -149,13 +179,13 @@ export const startPlugin = traceFunction("startPlugin", function startPlugin(p:
         }
     }
 
-    if (flux) {
-        for (const event in flux) {
-            FluxDispatcher.subscribe(event as FluxEvents, flux[event]);
-        }
+    if (enabledPluginsSubscribedFlux) {
+        subscribePluginFluxEvents(p, FluxDispatcher);
     }
 
     if (contextMenus) {
+        logger.debug("Adding context menus patches of plugin", name);
         for (const navId in contextMenus) {
             addContextMenuPatch(navId, contextMenus[navId]);
         }
@@ -182,7 +212,7 @@ export const stopPlugin = traceFunction("stopPlugin", function stopPlugin(p: Plu
     }
 
     if (commands?.length) {
-        logger.info("Unregistering commands of plugin", name);
+        logger.debug("Unregistering commands of plugin", name);
         for (const cmd of commands) {
             try {
                 unregisterCommand(cmd.name);
@@ -193,13 +223,10 @@ export const stopPlugin = traceFunction("stopPlugin", function stopPlugin(p: Plu
         }
     }
 
-    if (flux) {
-        for (const event in flux) {
-            FluxDispatcher.unsubscribe(event as FluxEvents, flux[event]);
-        }
-    }
+    unsubscribePluginFluxEvents(p, FluxDispatcher);
 
     if (contextMenus) {
+        logger.debug("Removing context menus patches of plugin", name);
         for (const navId in contextMenus) {
             removeContextMenuPatch(navId, contextMenus[navId]);
         }
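The new helpers centralize what startPlugin/stopPlugin used to do inline: walk a plugin's `flux` map and (un)subscribe each handler once a dispatcher is available. A reduced sketch of that shape, using local stand-in types rather than Vencord's real ones:

```ts
type FluxHandler = (event: any) => void;

interface PluginLike {
    name: string;
    flux?: Record<string, FluxHandler>;
}

interface DispatcherLike {
    subscribe(event: string, handler: FluxHandler): void;
    unsubscribe(event: string, handler: FluxHandler): void;
}

function subscribePluginFluxEvents(p: PluginLike, dispatcher: DispatcherLike) {
    if (!p.flux) return;
    for (const [event, handler] of Object.entries(p.flux)) {
        dispatcher.subscribe(event, handler);
    }
}

// Once the real FluxDispatcher is found, every enabled plugin is wired up in one pass;
// plugins started later are subscribed immediately because the "already subscribed" flag is set.
```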


@@ -36,6 +36,8 @@ const enum ShowMode {
     HiddenIconWithMutedStyle
 }
 
+const CONNECT = 1n << 20n;
+
 export const settings = definePluginSettings({
     hideUnreads: {
         description: "Hide Unreads",
@@ -273,12 +275,12 @@ export default definePlugin({
             {
                 // Change the role permission check to CONNECT if the channel is locked
                 match: /ADMINISTRATOR\)\|\|(?<=context:(\i)}.+?)(?=(.+?)VIEW_CHANNEL)/,
-                replace: (m, channel, permCheck) => `${m}!Vencord.Webpack.Common.PermissionStore.can(${PermissionsBits.CONNECT}n,${channel})?${permCheck}CONNECT):`
+                replace: (m, channel, permCheck) => `${m}!Vencord.Webpack.Common.PermissionStore.can(${CONNECT}n,${channel})?${permCheck}CONNECT):`
             },
             {
                 // Change the permissionOverwrite check to CONNECT if the channel is locked
                 match: /permissionOverwrites\[.+?\i=(?<=context:(\i)}.+?)(?=(.+?)VIEW_CHANNEL)/,
-                replace: (m, channel, permCheck) => `${m}!Vencord.Webpack.Common.PermissionStore.can(${PermissionsBits.CONNECT}n,${channel})?${permCheck}CONNECT):`
+                replace: (m, channel, permCheck) => `${m}!Vencord.Webpack.Common.PermissionStore.can(${CONNECT}n,${channel})?${permCheck}CONNECT):`
             },
             {
                 // Include the @everyone role in the allowed roles list for Hidden Channels
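The new `CONNECT = 1n << 20n` constant simply inlines the permission bit the patch needs instead of going through `PermissionsBits`. Discord permission flags are 64-bit bitfields, which is why the literal is a BigInt; a tiny sketch of how such a bit is tested:

```ts
const CONNECT = 1n << 20n;

function canConnect(permissions: bigint): boolean {
    return (permissions & CONNECT) === CONNECT;
}

console.log(canConnect(CONNECT | 1n)); // true
console.log(canConnect(1n << 10n));    // false
```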


@@ -18,20 +18,20 @@
 import { PatchReplacement, ReplaceFn } from "./types";
 
-export function canonicalizeMatch(match: RegExp | string) {
+export function canonicalizeMatch<T extends RegExp | string>(match: T): T {
     if (typeof match === "string") return match;
 
     const canonSource = match.source
         .replaceAll("\\i", "[A-Za-z_$][\\w$]*");
-    return new RegExp(canonSource, match.flags);
+    return new RegExp(canonSource, match.flags) as T;
 }
 
-export function canonicalizeReplace(replace: string | ReplaceFn, pluginName: string): string | ReplaceFn {
+export function canonicalizeReplace<T extends string | ReplaceFn>(replace: T, pluginName: string): T {
     const self = `Vencord.Plugins.plugins[${JSON.stringify(pluginName)}]`;
 
     if (typeof replace !== "function")
-        return replace.replaceAll("$self", self);
+        return replace.replaceAll("$self", self) as T;
 
-    return (...args) => replace(...args).replaceAll("$self", self);
+    return ((...args) => replace(...args).replaceAll("$self", self)) as T;
 }
 
 export function canonicalizeDescriptor<T>(descriptor: TypedPropertyDescriptor<T>, canonicalize: (value: T) => T) {
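The only behavioural piece here is the `\i` shorthand; the generics just make the return type follow the input type (a string stays a string, a RegExp stays a RegExp). A quick illustration of what the expansion does:

```ts
// Same expansion canonicalizeMatch performs on a RegExp's source:
const match = /(\i)\.loginToken/;
const canon = new RegExp(match.source.replaceAll("\\i", "[A-Za-z_$][\\w$]*"), match.flags);

console.log(canon.source); // ([A-Za-z_$][\w$]*)\.loginToken
```

The `as T` casts are needed because TypeScript cannot narrow the generic parameter through the `typeof` checks.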


@@ -36,7 +36,7 @@ export let Tooltip: t.Tooltip;
 export let TextInput: t.TextInput;
 export let TextArea: t.TextArea;
 export let Text: t.Text;
-export let Heading: t.HeadingTag;
+export let Heading: t.Heading;
 export let Select: t.Select;
 export let SearchableSelect: t.SearchableSelect;
 export let Slider: t.Slider;


@@ -26,6 +26,9 @@ export let FluxDispatcher: t.FluxDispatcher;
 waitFor(["dispatch", "subscribe"], m => {
     FluxDispatcher = m;
 
+    // Non import call to avoid circular dependency
+    Vencord.Plugins.subscribeAllPluginsFluxEvents(m);
+
     const cb = () => {
         m.unsubscribe("CONNECTION_OPEN", cb);
         _resolveReady();


@@ -18,42 +18,95 @@
 import { WEBPACK_CHUNK } from "@utils/constants";
 import { Logger } from "@utils/Logger";
-import { canonicalizeReplacement } from "@utils/patches";
+import { canonicalizeMatch, canonicalizeReplacement } from "@utils/patches";
 import { PatchReplacement } from "@utils/types";
+import { WebpackInstance } from "discord-types/other";
 
 import { traceFunction } from "../debug/Tracer";
-import { _initWebpack } from ".";
+import { patches } from "../plugins";
+import { _initWebpack, beforeInitListeners, factoryListeners, moduleListeners, subscriptions, wreq } from ".";
+
+const logger = new Logger("WebpackInterceptor", "#8caaee");
+const initCallbackRegex = canonicalizeMatch(/{return \i\(".+?"\)}/);
 
 let webpackChunk: any[];
 
-const logger = new Logger("WebpackInterceptor", "#8caaee");
-
-if (window[WEBPACK_CHUNK]) {
-    logger.info(`Patching ${WEBPACK_CHUNK}.push (was already existent, likely from cache!)`);
-    _initWebpack(window[WEBPACK_CHUNK]);
-    patchPush(window[WEBPACK_CHUNK]);
-} else {
-    Object.defineProperty(window, WEBPACK_CHUNK, {
-        get: () => webpackChunk,
-        set: v => {
-            if (v?.push) {
-                if (!v.push.$$vencordOriginal) {
-                    logger.info(`Patching ${WEBPACK_CHUNK}.push`);
-                    patchPush(v);
-                }
-
-                if (_initWebpack(v)) {
-                    logger.info("Successfully initialised Vencord webpack");
-                    // @ts-ignore
-                    delete window[WEBPACK_CHUNK];
-                    window[WEBPACK_CHUNK] = v;
-                }
-            }
-
-            webpackChunk = v;
-        },
-        configurable: true
-    });
+// Patch the window webpack chunk setter to monkey patch the push method before any chunks are pushed
+// This way we can patch the factory of everything being pushed to the modules array
+Object.defineProperty(window, WEBPACK_CHUNK, {
+    configurable: true,
+
+    get: () => webpackChunk,
+    set: v => {
+        if (v?.push) {
+            if (!v.push.$$vencordOriginal) {
+                logger.info(`Patching ${WEBPACK_CHUNK}.push`);
+                patchPush(v);
+
+                // @ts-ignore
+                delete window[WEBPACK_CHUNK];
+                window[WEBPACK_CHUNK] = v;
+            }
+        }
+
+        webpackChunk = v;
+    }
+});
+
+// wreq.O is the webpack onChunksLoaded function
+// Discord uses it to await for all the chunks to be loaded before initializing the app
+// We monkey patch it to also monkey patch the initialize app callback to get immediate access to the webpack require and run our listeners before doing it
+Object.defineProperty(Function.prototype, "O", {
+    configurable: true,
+
+    set(onChunksLoaded: any) {
+        // When using react devtools or other extensions, or even when discord loads the sentry, we may also catch their webpack here.
+        // This ensures we actually got the right one
+        // this.e (wreq.e) is the method for loading a chunk, and only the main webpack has it
+        if (new Error().stack?.includes("discord.com") && String(this.e).includes("Promise.all")) {
+            logger.info("Found main WebpackRequire.onChunksLoaded");
+
+            delete (Function.prototype as any).O;
+
+            const originalOnChunksLoaded = onChunksLoaded;
+
+            onChunksLoaded = function (this: unknown, result: any, chunkIds: string[], callback: () => any, priority: number) {
+                if (callback != null && initCallbackRegex.test(callback.toString())) {
+                    Object.defineProperty(this, "O", {
+                        value: originalOnChunksLoaded,
+                        configurable: true
+                    });
+
+                    const wreq = this as WebpackInstance;
+
+                    const originalCallback = callback;
+                    callback = function (this: unknown) {
+                        logger.info("Patched initialize app callback invoked, initializing our internal references to WebpackRequire and running beforeInitListeners");
+                        _initWebpack(wreq);
+
+                        for (const beforeInitListener of beforeInitListeners) {
+                            beforeInitListener(wreq);
+                        }
+
+                        originalCallback.apply(this, arguments as any);
+                    };
+
+                    callback.toString = originalCallback.toString.bind(originalCallback);
+                    arguments[2] = callback;
+                }
+
+                originalOnChunksLoaded.apply(this, arguments as any);
+            };
+
+            onChunksLoaded.toString = originalOnChunksLoaded.toString.bind(originalOnChunksLoaded);
+        }
+
+        Object.defineProperty(this, "O", {
+            value: onChunksLoaded,
+            configurable: true
+        });
+    }
+});
 
 // wreq.m is the webpack module factory.
 // normally, this is populated via webpackGlobal.push, which we patch below.
 // However, Discord has their .m prepopulated.
@@ -62,22 +115,23 @@ if (window[WEBPACK_CHUNK]) {
 // Update: Discord now has TWO webpack instances. Their normal one and sentry
 // Sentry does not push chunks to the global at all, so this same patch now also handles their sentry modules
 Object.defineProperty(Function.prototype, "m", {
+    configurable: true,
+
     set(v: any) {
         // When using react devtools or other extensions, we may also catch their webpack here.
         // This ensures we actually got the right one
-        if (new Error().stack?.includes("discord.com")) {
-            logger.info("Found webpack module factory");
+        const error = new Error();
+        if (error.stack?.includes("discord.com")) {
+            logger.info("Found Webpack module factory", error.stack.match(/\/assets\/(.+?\.js)/)?.[1] ?? "");
             patchFactories(v);
         }
 
         Object.defineProperty(this, "m", {
             value: v,
-            configurable: true,
+            configurable: true
         });
-    },
-    configurable: true
+    }
 });
-}
 
 function patchPush(webpackGlobal: any) {
     function handlePush(chunk: any) {
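Both hunks above use the same trick: webpack assigns `wreq.O` and `wreq.m` onto its require function, so a configurable setter on `Function.prototype` fires at that exact moment and hands over both the assigned value and, as `this`, the require function itself. A minimal standalone sketch of the trick with a made-up property name:

```ts
// "demoProp" is a made-up property, not one webpack actually assigns.
Object.defineProperty(Function.prototype, "demoProp", {
    configurable: true,
    set(v: any) {
        console.log("Intercepted assignment on", (this as any).name, "->", v);
        // Re-define as a plain own property so later reads and writes behave normally.
        Object.defineProperty(this, "demoProp", { value: v, configurable: true });
    }
});

function fakeWebpackRequire() { }
(fakeWebpackRequire as any).demoProp = 42;         // setter above fires here
console.log((fakeWebpackRequire as any).demoProp); // 42, now an ordinary own property

delete (Function.prototype as any).demoProp;       // remove the global hook when done
```

The real code additionally checks `new Error().stack` and `String(this.e)` so it only reacts to Discord's own webpack instance rather than, say, Sentry's or an extension's.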
@@ -91,6 +145,7 @@ function patchPush(webpackGlobal: any) {
     }
 
     handlePush.$$vencordOriginal = webpackGlobal.push;
+    handlePush.toString = handlePush.$$vencordOriginal.toString.bind(handlePush.$$vencordOriginal);
     // Webpack overwrites .push with its own push like so: `d.push = n.bind(null, d.push.bind(d));`
     // it wraps the old push (`d.push.bind(d)`). this old push is in this case our handlePush.
     // If we then repatched the new push, we would end up with recursive patching, which leads to our patches
@@ -99,41 +154,41 @@ function patchPush(webpackGlobal: any) {
     handlePush.bind = (...args: unknown[]) => handlePush.$$vencordOriginal.bind(...args);
 
     Object.defineProperty(webpackGlobal, "push", {
+        configurable: true,
+
         get: () => handlePush,
         set(v) {
             handlePush.$$vencordOriginal = v;
-        },
-        configurable: true
+        }
     });
 }
 
-function patchFactories(factories: Record<string | number, (module: { exports: any; }, exports: any, require: any) => void>) {
-    const { subscriptions, listeners } = Vencord.Webpack;
-    const { patches } = Vencord.Plugins;
+let webpackNotInitializedLogged = false;
 
+function patchFactories(factories: Record<string, (module: any, exports: any, require: WebpackInstance) => void>) {
     for (const id in factories) {
         let mod = factories[id];
 
-        // Discords Webpack chunks for some ungodly reason contain random
-        // newlines. Cyn recommended this workaround and it seems to work fine,
-        // however this could potentially break code, so if anything goes weird,
-        // this is probably why.
-        // Additionally, `[actual newline]` is one less char than "\n", so if Discord
-        // ever targets newer browsers, the minifier could potentially use this trick and
-        // cause issues.
-        //
-        // 0, prefix is to turn it into an expression: 0,function(){} would be invalid syntax without the 0,
-        let code: string = "0," + mod.toString().replaceAll("\n", "");
         const originalMod = mod;
         const patchedBy = new Set();
 
-        const factory = factories[id] = function (module, exports, require) {
+        const factory = factories[id] = function (module: any, exports: any, require: WebpackInstance) {
+            if (wreq == null && IS_DEV) {
+                if (!webpackNotInitializedLogged) {
+                    webpackNotInitializedLogged = true;
+                    logger.error("WebpackRequire was not initialized, running modules without patches instead.");
+                }
+
+                return void originalMod(module, exports, require);
+            }
+
             try {
                 mod(module, exports, require);
             } catch (err) {
                 // Just rethrow discord errors
                 if (mod === originalMod) throw err;
 
-                logger.error("Error in patched chunk", err);
+                logger.error("Error in patched module", err);
                 return void originalMod(module, exports, require);
             }
@@ -153,11 +208,11 @@ function patchFactories(factories: Record<string | number, (module: { exports: a
                 return;
             }
 
-            for (const callback of listeners) {
+            for (const callback of moduleListeners) {
                 try {
                     callback(exports, id);
                 } catch (err) {
-                    logger.error("Error in webpack listener", err);
+                    logger.error("Error in Webpack module listener:\n", err, callback);
                 }
             }
@@ -171,30 +226,48 @@ function patchFactories(factories: Record<string | number, (module: { exports: a
                         callback(exports.default, id);
                     }
                 } catch (err) {
-                    logger.error("Error while firing callback for webpack chunk", err);
+                    logger.error("Error while firing callback for Webpack subscription:\n", err, filter, callback);
                 }
             }
         } as any as { toString: () => string, original: any, (...args: any[]): void; };
 
-        // for some reason throws some error on which calling .toString() leads to infinite recursion
-        // when you force load all chunks???
-        factory.toString = () => mod.toString();
+        factory.toString = originalMod.toString.bind(originalMod);
         factory.original = originalMod;
 
+        for (const factoryListener of factoryListeners) {
+            try {
+                factoryListener(originalMod);
+            } catch (err) {
+                logger.error("Error in Webpack factory listener:\n", err, factoryListener);
+            }
+        }
+
+        // Discords Webpack chunks for some ungodly reason contain random
+        // newlines. Cyn recommended this workaround and it seems to work fine,
+        // however this could potentially break code, so if anything goes weird,
+        // this is probably why.
+        // Additionally, `[actual newline]` is one less char than "\n", so if Discord
+        // ever targets newer browsers, the minifier could potentially use this trick and
+        // cause issues.
+        //
+        // 0, prefix is to turn it into an expression: 0,function(){} would be invalid syntax without the 0,
+        let code: string = "0," + mod.toString().replaceAll("\n", "");
+
         for (let i = 0; i < patches.length; i++) {
             const patch = patches[i];
-            const executePatch = traceFunction(`patch by ${patch.plugin}`, (match: string | RegExp, replace: string) => code.replace(match, replace));
+
             if (patch.predicate && !patch.predicate()) continue;
+            if (!code.includes(patch.find)) continue;
 
-            if (code.includes(patch.find)) {
-                patchedBy.add(patch.plugin);
+            patchedBy.add(patch.plugin);
 
+            const executePatch = traceFunction(`patch by ${patch.plugin}`, (match: string | RegExp, replace: string) => code.replace(match, replace));
             const previousMod = mod;
             const previousCode = code;
 
-            // we change all patch.replacement to array in plugins/index
+            // We change all patch.replacement to array in plugins/index
             for (const replacement of patch.replacement as PatchReplacement[]) {
                 if (replacement.predicate && !replacement.predicate()) continue;
 
                 const lastMod = mod;
                 const lastCode = code;
@@ -212,15 +285,17 @@ function patchFactories(factories: Record<string | number, (module: { exports: a
                         if (patch.group) {
                             logger.warn(`Undoing patch group ${patch.find} by ${patch.plugin} because replacement ${replacement.match} had no effect`);
-                            code = previousCode;
                             mod = previousMod;
+                            code = previousCode;
                             patchedBy.delete(patch.plugin);
                             break;
                         }
-                    } else {
-                        code = newCode;
-                        mod = (0, eval)(`// Webpack Module ${id} - Patched by ${[...patchedBy].join(", ")}\n${newCode}\n//# sourceURL=WebpackModule${id}`);
-                    }
+
+                        continue;
+                    }
+
+                    code = newCode;
+                    mod = (0, eval)(`// Webpack Module ${id} - Patched by ${[...patchedBy].join(", ")}\n${newCode}\n//# sourceURL=WebpackModule${id}`);
                 } catch (err) {
                     logger.error(`Patch by ${patch.plugin} errored (Module id is ${id}): ${replacement.match}\n`, err);
@@ -258,15 +333,16 @@ function patchFactories(factories: Record<string | number, (module: { exports: a
                     }
 
                     patchedBy.delete(patch.plugin);
+
                     if (patch.group) {
                         logger.warn(`Undoing patch group ${patch.find} by ${patch.plugin} because replacement ${replacement.match} errored`);
-                        code = previousCode;
                         mod = previousMod;
+                        code = previousCode;
                         break;
                     }
 
-                    code = lastCode;
                     mod = lastMod;
+                    code = lastCode;
                 }
             }
@@ -274,4 +350,3 @@ function patchFactories(factories: Record<string | number, (module: { exports: a
             }
         }
     }
-}
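For context on the factory-patching loop itself (unchanged in spirit by this commit): the factory's source is string-replaced and then re-evaluated with an indirect `eval`, the `0,` prefix turning the function into an expression and `//# sourceURL` naming it in devtools. A reduced, self-contained sketch:

```ts
// A pretend module factory standing in for a webpack factory.
const originalFactory = function (module: any, exports: any) {
    exports.greet = () => "hello";
};

// Same shape as the real code: stringify, strip newlines, string-replace, re-eval.
let code = "0," + originalFactory.toString().replaceAll("\n", "");
code = code.replace('"hello"', '"hello, patched"');

const patchedFactory = (0, eval)(`// Demo module - patched\n${code}\n//# sourceURL=DemoPatchedModule`);

const moduleExports: any = {};
patchedFactory({}, moduleExports);
console.log(moduleExports.greet()); // "hello, patched"
```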


@@ -68,20 +68,16 @@ export const filters = {
     }
 };
 
-export const subscriptions = new Map<FilterFn, CallbackFn>();
-export const listeners = new Set<CallbackFn>();
-
 export type CallbackFn = (mod: any, id: string) => void;
 
-export function _initWebpack(instance: typeof window.webpackChunkdiscord_app) {
-    if (cache !== void 0) throw "no.";
-
-    instance.push([[Symbol("Vencord")], {}, r => wreq = r]);
-    instance.pop();
-    if (!wreq) return false;
+export const subscriptions = new Map<FilterFn, CallbackFn>();
+export const moduleListeners = new Set<CallbackFn>();
+export const factoryListeners = new Set<(factory: (module: any, exports: any, require: WebpackInstance) => void) => void>();
+export const beforeInitListeners = new Set<(wreq: WebpackInstance) => void>();
 
-    cache = wreq.c;
-    return true;
+export function _initWebpack(webpackRequire: WebpackInstance) {
+    wreq = webpackRequire;
+    cache = webpackRequire.c;
 }
 
 let devToolsOpen = false;
@@ -425,7 +421,7 @@ export async function extractAndLoadChunks(code: string[], matcher: RegExp = Def
     const match = module.toString().match(canonicalizeMatch(matcher));
     if (!match) {
-        const err = new Error("extractAndLoadChunks: Couldn't find entry point id in module factory code");
+        const err = new Error("extractAndLoadChunks: Couldn't find chunk loading in module factory code");
         logger.warn(err, "Code:", code, "Matcher:", matcher);
 
         // Strict behaviour in DevBuilds to fail early and make sure the issue is found
@@ -491,14 +487,6 @@ export function waitFor(filter: string | string[] | FilterFn, callback: Callback
     subscriptions.set(filter, callback);
 }
 
-export function addListener(callback: CallbackFn) {
-    listeners.add(callback);
-}
-
-export function removeListener(callback: CallbackFn) {
-    listeners.delete(callback);
-}
-
 /**
  * Search modules by keyword. This searches the factory methods,
  * meaning you can search all sorts of things, displayName, methodName, strings somewhere in the code, etc