Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3,459 changes: 1,719 additions & 1,740 deletions package-lock.json

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
"test:llm": "find src -name '*.llmtest.ts' | xargs tsx --test --test-concurrency=1",
"test:openai": "npm run build && find src -name '*.openaitest.ts' | xargs tsx --test",
"test:gemini": "npm run build && find src -name '*.geminitest.ts' | xargs tsx --test",
"test:ollama": "npm run build && find src -name '*.ollamatest.ts' | xargs tsx --test",
"lint": "npm run lint:prettier && npm run lint:eslint",
"lint:prettier": "prettier --check src/* src/**/*",
"lint:eslint": "eslint src/* src/**/*",
Expand Down Expand Up @@ -57,6 +58,7 @@
"dotenv": "^16.4.5",
"ipull": "^3.9.0",
"node-llama-cpp": "^3.0.0-beta.40",
"ollama": "^0.6.0",
"openai": "^4.55.1",
"tsx": "^4.16.2",
"typescript": "^5.5.4",
Expand All @@ -71,5 +73,8 @@
"pkgroll": "^2.4.2",
"prettier": "^3.3.3",
"typescript-eslint": "^8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-win32-x64-msvc": "^4.52.5"
}
}
44 changes: 44 additions & 0 deletions src/commands/ollama.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import { cli } from "../cli.js";
import prettier from "../plugins/prettier.js";
import { unminify } from "../unminify.js";
import babel from "../plugins/babel/babel.js";
import { ollamaRename } from "../plugins/ollama/ollama-rename.js";
import { verbose } from "../verbose.js";
import { env } from "../env.js";
import { parseNumber } from "../number-utils.js";
import { DEFAULT_CONTEXT_WINDOW_SIZE } from "./default-args.js";

/**
 * `humanify ollama` subcommand: unminifies a Javascript file by running it
 * through the babel transform, an Ollama-backed identifier renamer, and
 * prettier, writing the result to the chosen output directory.
 */
export const ollama = cli()
  .name("ollama")
  .description("Use Ollama to unminify code")
  .option("-m, --model <model>", "The model to use", "gpt-oss:20b")
  .option("-o, --outputDir <output>", "The output directory", "output")
  .option(
    "--baseURL <baseURL>",
    "The Ollama base server URL.",
    env("OLLAMA_BASE_URL") ?? "http://localhost:11434"
  )
  .option("--verbose", "Show verbose output")
  .option(
    "--contextSize <contextSize>",
    "The context size to use for the LLM",
    `${DEFAULT_CONTEXT_WINDOW_SIZE}`
  )
  .argument("input", "The input minified Javascript file")
  .action(async (inputFilename, opts) => {
    if (opts.verbose) {
      verbose.enabled = true;
    }

    // Plugin pipeline: AST cleanup first, then LLM-driven renaming, then
    // formatting of the final output.
    const plugins = [
      babel,
      ollamaRename({
        baseURL: opts.baseURL,
        model: opts.model,
        contextWindowSize: parseNumber(opts.contextSize)
      }),
      prettier
    ];

    await unminify(inputFilename, opts.outputDir, plugins);
  });
4 changes: 3 additions & 1 deletion src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,17 @@ import { version } from "../package.json";
import { download } from "./commands/download.js";
import { local } from "./commands/local.js";
import { openai } from "./commands/openai.js";
import { ollama } from "./commands/ollama.js";
import { cli } from "./cli.js";
import { azure } from "./commands/gemini.js";

/**
 * CLI entry point: registers every unminification backend as a subcommand
 * and dispatches on the process arguments.
 */
const program = cli()
  .name("humanify")
  .description("Unminify code using OpenAI's API, Ollama, or a local LLM")
  .version(version);

// Registration order determines the order commands appear in `--help`.
for (const command of [local, openai, ollama, azure, download()]) {
  program.addCommand(command);
}

program.parse(process.argv);
53 changes: 53 additions & 0 deletions src/plugins/ollama/ollama-rename.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { Ollama } from "ollama";
import { visitAllIdentifiers } from "../local-llm-rename/visit-all-identifiers.js";
import { showPercentage } from "../../progress.js";
import { verbose } from "../../verbose.js";

/**
 * Creates an unminify plugin that renames every identifier in the given code
 * using an Ollama-hosted model.
 *
 * @param baseURL - Ollama server URL; falls back to the ollama client default
 *   when omitted.
 * @param model - Name of the Ollama model to use for renaming.
 * @param contextWindowSize - Token budget used both to size the surrounding
 *   code passed to the model and as the model's `num_ctx` option.
 * @returns An async plugin `(code) => renamedCode` suitable for `unminify`.
 */
export function ollamaRename({
  baseURL,
  model,
  contextWindowSize
}: {
  baseURL?: string;
  model: string;
  contextWindowSize: number;
}) {
  const client = new Ollama({ host: baseURL });

  return async (code: string): Promise<string> => {
    return await visitAllIdentifiers(
      code,
      async (name, surroundingCode) => {
        verbose.log(`Renaming ${name}`);
        verbose.log("Context: ", surroundingCode);

        const response = await client.chat({
          model,
          messages: [
            {
              role: "system",
              content: `Rename Javascript variables/function \`${name}\` to have descriptive name based on their usage in the code. Respond only with valid JSON in the format: {"newName": "your_new_name_here"}`
            },
            {
              role: "user",
              content: surroundingCode
            }
          ],
          format: "json",
          // Bug fix: forward the requested context window to the model.
          // Without num_ctx, Ollama uses its default context size, which can
          // silently truncate the surrounding code that visitAllIdentifiers
          // sized against contextWindowSize above.
          options: { num_ctx: contextWindowSize }
        });

        const result = response.message?.content;
        if (!result) {
          throw new Error("Failed to rename", { cause: response });
        }

        // Validate the model's JSON payload instead of trusting it blindly:
        // a missing or non-string newName would otherwise propagate into the
        // rename pass and corrupt the output.
        const renamed: unknown = JSON.parse(result).newName;
        if (typeof renamed !== "string" || renamed.length === 0) {
          throw new Error(`Model returned an invalid new name for \`${name}\``, {
            cause: result
          });
        }

        verbose.log(`Renamed to ${renamed}`);

        return renamed;
      },
      contextWindowSize,
      showPercentage
    );
  };
}
7 changes: 6 additions & 1 deletion src/test-utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,12 @@ export function assertMatches(actual: string, expected: string[]) {

export async function humanify(...argv: string[]) {
const extraArgs = argv.includes("local") ? ["--seed", "1"] : [];
const process = spawn("./dist/index.mjs", [...argv, ...extraArgs]);
// Try using built dist first, fall back to tsx if dist doesn't exist
const { existsSync } = await import("fs");
const command = existsSync("./dist/index.mjs") ? "./dist/index.mjs" : "npx";
const args = existsSync("./dist/index.mjs") ? [...argv, ...extraArgs] : ["tsx", "src/index.ts", ...argv, ...extraArgs];

const process = spawn(command, args, { shell: true });
const stdout: string[] = [];
const stderr: string[] = [];
process.stdout.on("data", (data) => stdout.push(data.toString()));
Expand Down
60 changes: 60 additions & 0 deletions src/test/e2e.ollamatest.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import test from "node:test";
import { readFile, rm } from "node:fs/promises";
import assert from "node:assert";
import { humanify } from "../test-utils.js";
import { Ollama } from "ollama";

const TEST_OUTPUT_DIR = "test-output";
const TEST_MODEL = "gpt-oss:20b";

// Remove any generated output between tests so runs stay isolated; the
// returned promise is awaited by node:test before the next test starts.
test.afterEach(() => rm(TEST_OUTPUT_DIR, { recursive: true, force: true }));

test("Unminifies an example file successfully", async () => {
  const ollama = new Ollama({ host: "http://localhost:11434" });

  // Asks the model to grade a file's readability. Returns the model's
  // one-word rating (per the prompt: "EXCELLENT", "GOOD", or "UNREADABLE"),
  // not a boolean — hence the name rateReadability rather than fileIsMinified.
  const rateReadability = async (filename: string) => {
    const code = await readFile(filename, "utf-8");
    const response = await ollama.chat({
      model: TEST_MODEL,
      messages: [
        {
          role: "user",
          content: `Read the following code and rate its readability and variable names. Answer only with one word: "EXCELLENT", "GOOD", or "UNREADABLE".\n\nCode:\n${code}`
        }
      ]
    });
    return response.message.content.trim();
  };

  // Asserts that `actual` (case-insensitively) starts with one of the
  // expected ratings. Synchronous — callers must not await it.
  const expectStartsWith = (expected: string[], actual: string) => {
    assert(
      expected.some((e) => actual.toUpperCase().startsWith(e)),
      `Expected one of "${expected}" but got "${actual}"`
    );
  };

  // Check the minified file readability (might be GOOD or UNREADABLE depending on model)
  const minifiedReadability = await rateReadability(`fixtures/example.min.js`);
  console.log(`Minified file rated as: ${minifiedReadability}`);

  await humanify(
    "ollama",
    "fixtures/example.min.js",
    "-m",
    TEST_MODEL,
    "--verbose",
    "--outputDir",
    TEST_OUTPUT_DIR
  );

  const deobfuscatedReadability = await rateReadability(
    `${TEST_OUTPUT_DIR}/deobfuscated.js`
  );
  console.log(`Deobfuscated file rated as: ${deobfuscatedReadability}`);

  // The deobfuscated file should be rated EXCELLENT or GOOD.
  // Bug fix: expectStartsWith is synchronous, so the original `await` on it
  // was a misleading no-op and has been removed.
  expectStartsWith(["EXCELLENT", "GOOD"], deobfuscatedReadability);
});
Loading