Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions src/platforms/slack/token-extractor.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,58 @@ function createCookiesDb(
db.close()
}

describe('TokenExtractor token deduplication', () => {
  test('keeps first token per team and upgrades unknown team name', async () => {
    // given — two .log entries for the same team: first has unknown name, second has a name
    const slackDir = mkdtempSync(join(tmpdir(), 'slack-dedup-'))
    tempDirs.push(slackDir)

    // 64-char lowercase-hex suffixes, matching the xoxc token shape
    const hex64a = 'a'.repeat(64)
    const hex64b = 'b'.repeat(64)
    const tokenA = `xoxc-1111111111-2222222222-3333333333-${hex64a}`
    const tokenB = `xoxc-4444444444-5555555555-6666666666-${hex64b}`

    const leveldbDir = join(slackDir, 'Local Storage', 'leveldb')
    mkdirSync(leveldbDir, { recursive: true })
    // First entry: tokenA with team ID but no name
    // Second entry: tokenB with same team ID and a name
    writeFileSync(join(leveldbDir, '000001.log'), `"${tokenA}"T12345678xxx"${tokenB}"T12345678"name":"workspace-name"`)

    // when
    const extractor = new TokenExtractor('darwin', slackDir)
    const result = await extractor.extract()

    // then — first token wins, but team name is upgraded
    expect(result.length).toBe(1)
    expect(result[0].token).toBe(tokenA)
    expect(result[0].workspace_name).toBe('workspace-name')
  })

  test('prefers .log tokens over .ldb tokens for same team', async () => {
    // given — same team ID in both .log (fresh) and .ldb (stale)
    const slackDir = mkdtempSync(join(tmpdir(), 'slack-dedup-order-'))
    tempDirs.push(slackDir)

    // NOTE(review): this previously used 's'.repeat(64), but 's' is not a hex
    // digit. If the extractor validates the 64-char suffix as [0-9a-f]{64},
    // the stale token was never extracted at all and this test passed
    // vacuously — without actually exercising the .log-over-.ldb preference.
    // Use a valid hex digit so the stale token is a real candidate.
    const hex64fresh = 'f'.repeat(64)
    const hex64stale = 'd'.repeat(64)
    const freshToken = `xoxc-1111111111-2222222222-3333333333-${hex64fresh}`
    const staleToken = `xoxc-9999999999-8888888888-7777777777-${hex64stale}`

    const leveldbDir = join(slackDir, 'Local Storage', 'leveldb')
    mkdirSync(leveldbDir, { recursive: true })
    // .log files are scanned before .ldb files, so the fresh token is seen first
    writeFileSync(join(leveldbDir, '000001.log'), `"${freshToken}"T12345678"name":"fresh-workspace"`)
    writeFileSync(join(leveldbDir, '000002.ldb'), `"${staleToken}"T12345678"team_name":"stale-workspace"`)

    // when
    const extractor = new TokenExtractor('darwin', slackDir)
    const result = await extractor.extract()

    // then — .log token wins
    expect(result.length).toBe(1)
    expect(result[0].token).toBe(freshToken)
  })
})

describe('TokenExtractor LevelDB fragmentation markers', () => {
function buildFragmentedLdbContent(tokenParts: string[], marker: number[]): Buffer {
// given — build binary content simulating LevelDB fragmentation:
Expand Down
15 changes: 12 additions & 3 deletions src/platforms/slack/token-extractor.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { execSync } from 'node:child_process'
import { createDecipheriv, pbkdf2Sync } from 'node:crypto'
import { copyFileSync, existsSync, readdirSync, readFileSync, rmSync } from 'node:fs'
import { copyFileSync, existsSync, readdirSync, readFileSync, rmSync, statSync } from 'node:fs'
import { createRequire } from 'node:module'
import { homedir, tmpdir } from 'node:os'
import { join } from 'node:path'
Expand Down Expand Up @@ -142,8 +142,11 @@ export class TokenExtractor {
/**
 * Collapses duplicate tokens that share the same Slack team ID.
 *
 * Policy: the FIRST token seen for a team wins. Input order is the priority
 * order — per the file-scanning code, .log-derived tokens are fed before
 * .ldb-derived ones, so fresher tokens take precedence. If the first entry
 * for a team had no resolvable name ('unknown') and a later duplicate
 * carries a real name, the later entry's metadata is adopted while the
 * first-seen token string is kept.
 *
 * @param tokens candidate tokens in priority order (earlier = preferred)
 * @returns at most one TokenInfo per team ID
 */
private deduplicateTokens(tokens: TokenInfo[]): TokenInfo[] {
const seen = new Map<string, TokenInfo>()
for (const token of tokens) {
const existing = seen.get(token.teamId)
if (!existing) {
// First occurrence for this team — keep it as-is.
seen.set(token.teamId, token)
} else if (existing.teamName === 'unknown' && token.teamName !== 'unknown') {
// Upgrade: take the later duplicate's metadata (notably its real
// teamName) via spread, but preserve the first-seen token string.
seen.set(token.teamId, { ...token, token: existing.token })
}
}
return Array.from(seen.values())
Expand Down Expand Up @@ -219,7 +222,13 @@ export class TokenExtractor {
// Prioritize .log files (not compacted, have clean data)
// Then fall back to .ldb files
const logFiles = readdirSync(dbPath).filter((f) => f.endsWith('.log'))
const ldbFiles = readdirSync(dbPath).filter((f) => f.endsWith('.ldb'))
const ldbFiles = readdirSync(dbPath)
.filter((f) => f.endsWith('.ldb'))
.sort((a, b) => {
const statA = statSync(join(dbPath, a))
const statB = statSync(join(dbPath, b))
return statB.mtimeMs - statA.mtimeMs
})
const files = [...logFiles, ...ldbFiles]

for (const file of files) {
Expand Down