diff --git a/docs/SOLUTION_CONTRACT.md b/docs/SOLUTION_CONTRACT.md index ed4d02f..24b6172 100644 --- a/docs/SOLUTION_CONTRACT.md +++ b/docs/SOLUTION_CONTRACT.md @@ -176,6 +176,133 @@ SOLUTIONS = { } ``` +### A.7 Solution Comment Format + +Solutions SHOULD include structured comments to explain the algorithm, approach, and key insights. This section defines the standard comment format. + +#### A.7.1 File-Level Docstring + +Every solution file SHOULD start with a docstring describing the problem: + +```python +""" +Problem: Two Sum +Link: https://leetcode.com/problems/two-sum/ + +Given an array of integers nums and an integer target, return indices +of the two numbers such that they add up to target. + +Constraints: +- 2 <= nums.length <= 10^4 +- -10^9 <= nums[i] <= 10^9 +- -10^9 <= target <= 10^9 +- Only one valid answer exists. +""" +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `Problem` | ✅ | Problem title | +| `Link` | ✅ | LeetCode URL | +| Description | Recommended | Brief problem statement | +| `Constraints` | Recommended | Key constraints affecting algorithm choice | + +#### A.7.2 Solution Block Comments + +Each solution class SHOULD be preceded by a block comment explaining the approach. + +**No blank line** between the comment block and the class definition: + +```python +# ============================================================================ +# Solution 1: Sliding Window (Optimized with Jump) +# Time: O(n), Space: O(min(n, σ)) +# - Each character visited at most twice +# - Uses last-seen-index array for O(1) duplicate detection +# - Direct position jumping instead of incremental contraction +# ============================================================================ +class SolutionSlidingWindow: # ← No blank line after comment block + ... +``` + +**Format:** + +``` +# ============================================ +# Solution {N}: {Approach Name} +# Time: O(?), Space: O(?) 
+# - {Key insight or implementation detail} +# - {Additional notes} +# ============================================ +class ClassName: # ← No blank line before class/function +``` + +| Component | Required | Description | +|-----------|----------|-------------| +| Solution number & name | ✅ | e.g., `Solution 1: Sliding Window` | +| Time/Space complexity | ✅ | e.g., `Time: O(n), Space: O(n)` | +| Bullet points | Recommended | Key insights, implementation details | +| **No blank line** | ✅ | Comment block directly followed by class/function | + +**More examples:** + +```python +# ============================================ +# Solution 1: Single Pass +# Time: O(max(m,n)), Space: O(max(m,n)) +# - Single pass through both lists +# - Result list has at most max(m,n) + 1 nodes +# ============================================ +class Solution: + ... +``` + +```python +# ============================================================================ +# Solution 2: Using Dictionary (More Flexible for Unicode) +# Time: O(n), Space: O(min(n, σ)) +# - Same sliding window approach with dictionary instead of array +# - More flexible for Unicode strings but slightly slower +# ============================================================================ +class SolutionDict: + ... +``` + +```python +# ============================================================================ +# Solution 3: Using Set (Standard While-Loop Pattern) +# Time: O(n), Space: O(min(n, σ)) +# - Uses set to track current window characters +# - Demonstrates standard while-loop contraction pattern +# ============================================================================ +class SolutionSet: + ... +``` + +#### A.7.3 JUDGE_FUNC Comments (Optional) + +When defining a `JUDGE_FUNC`, you MAY include a block comment explaining its purpose and complexity. 
+ +Same rule: **no blank line** between comment and function: + +```python +# ============================================ +# JUDGE_FUNC - Required for generator support +# ============================================ +# Uses brute force O(m+n) merge to compute the correct answer, +# then compares with the solution output. +# ============================================ +def judge(actual, expected, input_data: str) -> bool: # ← No blank line + ... + +JUDGE_FUNC = judge +``` + +This is optional but recommended when: +- The judge uses a different algorithm than the solution +- The judge has notable complexity characteristics +- Generator support requires custom validation + --- ## B. SOLUTIONS Metadata Schema @@ -629,6 +756,12 @@ When adding or modifying a solution, verify: """ Problem: {Problem Title} Link: https://leetcode.com/problems/{slug}/ + +{Brief problem description} + +Constraints: +- {constraint 1} +- {constraint 2} """ from typing import List from _runner import get_solver @@ -652,9 +785,12 @@ SOLUTIONS = { } # ============================================ -# Solution class(es) +# Solution 1: {Approach Name} +# Time: O(?), Space: O(?) +# - {Key insight or implementation detail} +# - {Additional notes} # ============================================ -class Solution: +class Solution: # ← No blank line after comment block def methodName(self, ...): ... 
diff --git a/generators/0039_combination_sum.py b/generators/0039_combination_sum.py new file mode 100644 index 0000000..3991c6b --- /dev/null +++ b/generators/0039_combination_sum.py @@ -0,0 +1,62 @@ +# generators/0039_combination_sum.py +""" +Test Case Generator for Problem 0039 - Combination Sum + +LeetCode Constraints: +- 1 <= candidates.length <= 30 +- 2 <= candidates[i] <= 40 +- All elements of candidates are distinct +- 1 <= target <= 40 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Combination Sum. + + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: candidates\\ntarget + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "2,3,6,7\n7", # Classic example + "2,3,5\n8", # Multiple combinations + "2\n1", # No solution + "7,8,9\n7", # Single element solution + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case.""" + # Random number of candidates (2-15 for reasonable test size) + n = random.randint(2, 15) + + # Generate distinct candidates in range [2, 40] + candidates = random.sample(range(2, 41), min(n, 39)) + + # Generate target that is likely achievable + min_candidate = min(candidates) + target = random.randint(min_candidate, 40) + + candidates_str = ','.join(map(str, candidates)) + return f"{candidates_str}\n{target}" + diff --git a/generators/0040_combination_sum_ii.py b/generators/0040_combination_sum_ii.py new file mode 100644 index 0000000..dc3b604 --- /dev/null +++ b/generators/0040_combination_sum_ii.py @@ -0,0 +1,60 @@ +# generators/0040_combination_sum_ii.py +""" +Test Case Generator for Problem 0040 - Combination Sum II + 
+LeetCode Constraints: +- 1 <= candidates.length <= 100 +- 1 <= candidates[i] <= 50 +- 1 <= target <= 30 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Combination Sum II. + + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: candidates\\ntarget + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "10,1,2,7,6,1,5\n8", # Classic with duplicates + "2,5,2,1,2\n5", # Multiple duplicates + "1,1,1,1,1\n3", # All same + "2\n1", # No solution + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case with possible duplicates.""" + # Random number of candidates (2-20 for reasonable test size) + n = random.randint(2, 20) + + # Generate candidates with possible duplicates + candidates = [random.randint(1, 50) for _ in range(n)] + + # Generate target in valid range + target = random.randint(1, 30) + + candidates_str = ','.join(map(str, candidates)) + return f"{candidates_str}\n{target}" + diff --git a/generators/0046_permutations.py b/generators/0046_permutations.py new file mode 100644 index 0000000..dac5f9f --- /dev/null +++ b/generators/0046_permutations.py @@ -0,0 +1,57 @@ +# generators/0046_permutations.py +""" +Test Case Generator for Problem 0046 - Permutations + +LeetCode Constraints: +- 1 <= nums.length <= 6 +- -10 <= nums[i] <= 10 +- All the integers of nums are unique +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Permutations. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: nums (comma-separated) + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "1", # Single element + "1,2", # Two elements + "1,2,3", # Classic example + "0,-1,1", # With negatives + "1,2,3,4,5,6", # Maximum length + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case with distinct integers.""" + # Random length 1-6 + n = random.randint(1, 6) + + # Generate distinct integers in range [-10, 10] + nums = random.sample(range(-10, 11), n) + + return ','.join(map(str, nums)) + diff --git a/generators/0047_permutations_ii.py b/generators/0047_permutations_ii.py new file mode 100644 index 0000000..3a931f1 --- /dev/null +++ b/generators/0047_permutations_ii.py @@ -0,0 +1,56 @@ +# generators/0047_permutations_ii.py +""" +Test Case Generator for Problem 0047 - Permutations II + +LeetCode Constraints: +- 1 <= nums.length <= 8 +- -10 <= nums[i] <= 10 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Permutations II. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: nums (comma-separated) + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "1,1,2", # Classic with duplicates + "1,2,3", # No duplicates + "1,1,1", # All same + "1,1,2,2", # Pairs of duplicates + "0,0,0,0", # All zeros + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case with possible duplicates.""" + # Random length 1-8 + n = random.randint(1, 8) + + # Generate integers with possible duplicates + nums = [random.randint(-10, 10) for _ in range(n)] + + return ','.join(map(str, nums)) + diff --git a/generators/0052_n_queens_ii.py b/generators/0052_n_queens_ii.py new file mode 100644 index 0000000..1596192 --- /dev/null +++ b/generators/0052_n_queens_ii.py @@ -0,0 +1,39 @@ +# generators/0052_n_queens_ii.py +""" +Test Case Generator for Problem 0052 - N-Queens II + +LeetCode Constraints: +- 1 <= n <= 9 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for N-Queens II. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input - a single integer n + """ + if seed is not None: + random.seed(seed) + + # Edge cases first (all valid n values) + edge_cases = ["1", "4", "8", "9"] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + n = random.randint(1, 9) + yield str(n) + diff --git a/generators/0077_combinations.py b/generators/0077_combinations.py new file mode 100644 index 0000000..dd305cb --- /dev/null +++ b/generators/0077_combinations.py @@ -0,0 +1,53 @@ +# generators/0077_combinations.py +""" +Test Case Generator for Problem 0077 - Combinations + +LeetCode Constraints: +- 1 <= n <= 20 +- 1 <= k <= n +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Combinations. + + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: n\\nk + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "4\n2", # Classic example + "1\n1", # Minimal + "5\n5", # k equals n + "5\n1", # k equals 1 + "10\n3", # Larger case + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case.""" + # Keep n reasonable to avoid explosion (C(20,10) is large) + n = random.randint(1, 12) + k = random.randint(1, n) + return f"{n}\n{k}" + diff --git a/generators/0078_subsets.py b/generators/0078_subsets.py new file mode 100644 index 0000000..d8cbd16 --- /dev/null +++ b/generators/0078_subsets.py @@ -0,0 +1,56 @@ +# generators/0078_subsets.py +""" +Test Case Generator for Problem 0078 - Subsets + +LeetCode Constraints: +- 1 <= nums.length 
<= 10 +- -10 <= nums[i] <= 10 +- All the numbers of nums are unique +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Subsets. + + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: nums (comma-separated) + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "1,2,3", # Classic example + "0", # Single element + "1,2", # Two elements + "1,2,3,4", # Four elements + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case with unique integers.""" + # Random length 1-10 + n = random.randint(1, 10) + + # Generate distinct integers in range [-10, 10] + nums = random.sample(range(-10, 11), n) + + return ','.join(map(str, nums)) + diff --git a/generators/0079_word_search.py b/generators/0079_word_search.py new file mode 100644 index 0000000..206865a --- /dev/null +++ b/generators/0079_word_search.py @@ -0,0 +1,101 @@ +# generators/0079_word_search.py +""" +Test Case Generator for Problem 0079 - Word Search + +LeetCode Constraints: +- m == board.length +- n == board[i].length +- 1 <= m, n <= 6 +- 1 <= word.length <= 15 +- board and word consists of only lowercase and uppercase English letters +""" +import random +import string +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Word Search. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: m,n\\nrow1\\nrow2\\n...\\nword + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "3,4\nA,B,C,E\nS,F,C,S\nA,D,E,E\nABCCED", # Classic True + "3,4\nA,B,C,E\nS,F,C,S\nA,D,E,E\nSEE", # True + "3,4\nA,B,C,E\nS,F,C,S\nA,D,E,E\nABCB", # False (reuse) + "1,1\nA\nA", # Single cell + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case.""" + m = random.randint(1, 6) + n = random.randint(1, 6) + + # Generate random board + letters = string.ascii_uppercase + board = [[random.choice(letters) for _ in range(n)] for _ in range(m)] + + # Generate word - sometimes from board path, sometimes random + if random.random() < 0.5: + # Create a valid path word + word_len = random.randint(1, min(8, m * n)) + word = _generate_valid_word(board, m, n, word_len) + else: + # Random word (may or may not exist) + word_len = random.randint(1, 10) + word = ''.join(random.choice(letters) for _ in range(word_len)) + + # Format output + lines = [f"{m},{n}"] + for row in board: + lines.append(','.join(row)) + lines.append(word) + + return '\n'.join(lines) + + +def _generate_valid_word(board, m, n, length): + """Generate a word that exists in the board.""" + r, c = random.randint(0, m-1), random.randint(0, n-1) + word = [board[r][c]] + visited = {(r, c)} + + directions = [(-1, 0), (1, 0), (0, -1), (0, 1)] + + while len(word) < length: + neighbors = [] + for dr, dc in directions: + nr, nc = r + dr, c + dc + if 0 <= nr < m and 0 <= nc < n and (nr, nc) not in visited: + neighbors.append((nr, nc)) + + if not neighbors: + break + + r, c = random.choice(neighbors) + visited.add((r, c)) + word.append(board[r][c]) + + return ''.join(word) + diff --git 
a/generators/0090_subsets_ii.py b/generators/0090_subsets_ii.py new file mode 100644 index 0000000..f64fa37 --- /dev/null +++ b/generators/0090_subsets_ii.py @@ -0,0 +1,55 @@ +# generators/0090_subsets_ii.py +""" +Test Case Generator for Problem 0090 - Subsets II + +LeetCode Constraints: +- 1 <= nums.length <= 10 +- -10 <= nums[i] <= 10 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Subsets II. + + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: nums (comma-separated) + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "1,2,2", # Classic with duplicates + "0", # Single element + "1,1,1", # All same + "1,2,2,3,3", # Multiple duplicate groups + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case with possible duplicates.""" + # Random length 1-10 + n = random.randint(1, 10) + + # Generate integers with possible duplicates + nums = [random.randint(-10, 10) for _ in range(n)] + + return ','.join(map(str, nums)) + diff --git a/generators/0093_restore_ip_addresses.py b/generators/0093_restore_ip_addresses.py new file mode 100644 index 0000000..b8c19e8 --- /dev/null +++ b/generators/0093_restore_ip_addresses.py @@ -0,0 +1,56 @@ +# generators/0093_restore_ip_addresses.py +""" +Test Case Generator for Problem 0093 - Restore IP Addresses + +LeetCode Constraints: +- 1 <= s.length <= 20 +- s consists of digits only +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Restore IP Addresses. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input - a digit string + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "25525511135", # Classic example + "0000", # All zeros + "101023", # Multiple solutions + "1111", # Minimal valid + "111111111111", # All ones (12 digits) + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random digit string.""" + # Length 4-12 for reasonable IP addresses + length = random.randint(4, 12) + + # Generate random digits + digits = ''.join(str(random.randint(0, 9)) for _ in range(length)) + + return digits + diff --git a/generators/0131_palindrome_partitioning.py b/generators/0131_palindrome_partitioning.py new file mode 100644 index 0000000..68ced58 --- /dev/null +++ b/generators/0131_palindrome_partitioning.py @@ -0,0 +1,58 @@ +# generators/0131_palindrome_partitioning.py +""" +Test Case Generator for Problem 0131 - Palindrome Partitioning + +LeetCode Constraints: +- 1 <= s.length <= 16 +- s contains only lowercase English letters +""" +import random +import string +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Palindrome Partitioning. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input - a lowercase string + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "aab", # Classic example + "a", # Single char + "aa", # Two same chars + "aba", # Palindrome itself + "abcba", # Longer palindrome + "abcd", # No repeated chars + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random lowercase string.""" + # Length 1-10 (keeping small to avoid explosion) + length = random.randint(1, 10) + + # Use limited alphabet to increase palindrome chances + alphabet = "abc" # Limited for more palindromes + + return ''.join(random.choice(alphabet) for _ in range(length)) + diff --git a/generators/0216_combination_sum_iii.py b/generators/0216_combination_sum_iii.py new file mode 100644 index 0000000..707c8ac --- /dev/null +++ b/generators/0216_combination_sum_iii.py @@ -0,0 +1,59 @@ +# generators/0216_combination_sum_iii.py +""" +Test Case Generator for Problem 0216 - Combination Sum III + +LeetCode Constraints: +- 2 <= k <= 9 +- 1 <= n <= 60 +""" +import random +from typing import Iterator, Optional + + +def generate(count: int = 10, seed: Optional[int] = None) -> Iterator[str]: + """ + Generate test case inputs for Combination Sum III. 
+ + Args: + count: Number of test cases to generate + seed: Random seed for reproducibility + + Yields: + str: Test input in the format: k\\nn + """ + if seed is not None: + random.seed(seed) + + # Edge cases first + edge_cases = [ + "3\n7", # Classic example (1+2+4=7) + "3\n9", # Multiple solutions + "4\n1", # Impossible (min sum with 4 nums is 1+2+3+4=10) + "2\n18", # Impossible (numbers are distinct, so max sum for k=2 is 8+9=17) + "9\n45", # Maximum possible (1+2+...+9=45) + ] + + for edge in edge_cases: + yield edge + count -= 1 + if count <= 0: + return + + # Random cases + for _ in range(count): + yield _generate_case() + + +def _generate_case() -> str: + """Generate a single random test case.""" + k = random.randint(2, 9) + # n should be reasonable for k numbers from 1-9 + # Min sum with k nums: 1+2+...+k = k*(k+1)/2 + # Max sum with k nums: (10-k)+...+9 = (9+10-k)*k/2 + min_sum = k * (k + 1) // 2 + max_sum = (19 - k) * k // 2 + + n = random.randint(min_sum, min(60, max_sum)) + + return f"{k}\n{n}" + diff --git a/meta/problems/0039_combination_sum.toml b/meta/problems/0039_combination_sum.toml new file mode 100644 index 0000000..26b3465 --- /dev/null +++ b/meta/problems/0039_combination_sum.toml @@ -0,0 +1,52 @@ +# Problem: Combination Sum +# https://leetcode.com/problems/combination-sum/ + +# ===== Problem Info ===== +id = "0039" +slug = "0039_combination_sum" +title = "Combination Sum" +leetcode_id = 39 +url = "https://leetcode.com/problems/combination-sum/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft", "uber", "airbnb"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150", "blind_75"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0040", "0077", "0216", "0078"] + 
+# ===== File Locations ===== +[files] +solution = "solutions/0039_combination_sum.py" +generator = "generators/0039_combination_sum.py" +tests_dir = "tests/0039_combination_sum/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "combinationSum" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0040", "0077"] + +role = "base" +variant = "target_search_with_reuse" +based_on = [] +delta = "" +complexity = "O(n^(t/m)) time, O(t/m) space" +notes = "Backtracking with element reuse. Allow reuse by recursing with same index i (not i+1). Prune when candidate > remaining target." + diff --git a/meta/problems/0040_combination_sum_ii.toml b/meta/problems/0040_combination_sum_ii.toml new file mode 100644 index 0000000..f617913 --- /dev/null +++ b/meta/problems/0040_combination_sum_ii.toml @@ -0,0 +1,52 @@ +# Problem: Combination Sum II +# https://leetcode.com/problems/combination-sum-ii/ + +# ===== Problem Info ===== +id = "0040" +slug = "0040_combination_sum_ii" +title = "Combination Sum II" +leetcode_id = 40 +url = "https://leetcode.com/problems/combination-sum-ii/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0039", "0090", "0047", "0216"] + +# ===== File Locations ===== +[files] +solution = "solutions/0040_combination_sum_ii.py" +generator = "generators/0040_combination_sum_ii.py" +tests_dir = "tests/0040_combination_sum_ii/" + +# ===== Solutions ===== 
+[[solutions]] +key = "default" +class = "Solution" +method = "combinationSum2" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0039", "0090"] + +role = "variant" +variant = "no_reuse_with_deduplication" +based_on = ["0039#default"] +delta = "No reuse: recurse with i+1. Handle duplicates: sort + same-level skip (i > start_index and candidates[i] == candidates[i-1])." +complexity = "O(2^n) time, O(n) space" +notes = "Combines no-reuse pattern with same-level deduplication for handling duplicate candidates." + diff --git a/meta/problems/0046_permutations.toml b/meta/problems/0046_permutations.toml new file mode 100644 index 0000000..7d2d9a5 --- /dev/null +++ b/meta/problems/0046_permutations.toml @@ -0,0 +1,58 @@ +# Problem: Permutations +# https://leetcode.com/problems/permutations/ + +# ===== Problem Info ===== +id = "0046" +slug = "0046_permutations" +title = "Permutations" +leetcode_id = 46 +url = "https://leetcode.com/problems/permutations/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft", "apple", "uber"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_permutation"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0047", "0078", "0077"] + +# ===== Pattern Role ===== +[pattern_role] +is_base_template = true +base_for_kernel = "BacktrackingExploration" +derived_problems = ["0047"] + +# ===== File Locations ===== +[files] +solution = "solutions/0046_permutations.py" +generator = "generators/0046_permutations.py" +tests_dir = "tests/0046_permutations/" + +# ===== Solutions ===== +[[solutions]] +key = 
"default" +class = "Solution" +method = "permute" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_permutation"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0047"] + +role = "base" +variant = "" +based_on = [] +delta = "" +complexity = "O(n! × n) time, O(n) space" +notes = "BASE TEMPLATE for permutation enumeration. Track usage with boolean array. At each position, try every unused element. This is the canonical backtracking permutation pattern." + diff --git a/meta/problems/0047_permutations_ii.toml b/meta/problems/0047_permutations_ii.toml new file mode 100644 index 0000000..531d365 --- /dev/null +++ b/meta/problems/0047_permutations_ii.toml @@ -0,0 +1,52 @@ +# Problem: Permutations II +# https://leetcode.com/problems/permutations-ii/ + +# ===== Problem Info ===== +id = "0047" +slug = "0047_permutations_ii" +title = "Permutations II" +leetcode_id = 47 +url = "https://leetcode.com/problems/permutations-ii/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft"] + +# ===== Roadmaps ===== +roadmaps = [] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_permutation"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0046", "0040", "0090"] + +# ===== File Locations ===== +[files] +solution = "solutions/0047_permutations_ii.py" +generator = "generators/0047_permutations_ii.py" +tests_dir = "tests/0047_permutations_ii/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "permuteUnique" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_permutation"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = 
["0046"] + +role = "variant" +variant = "with_duplicates" +based_on = ["0046#default"] +delta = "Sort array + same-level deduplication: skip nums[i] if i > 0 and nums[i] == nums[i-1] and !used[i-1]. Ensures leftmost duplicate is always picked first at each level." +complexity = "O(n! × n) time, O(n) space" +notes = "Extension of base permutation pattern with duplicate handling using same-level deduplication." + diff --git a/meta/problems/0052_n_queens_ii.toml b/meta/problems/0052_n_queens_ii.toml new file mode 100644 index 0000000..3c39b67 --- /dev/null +++ b/meta/problems/0052_n_queens_ii.toml @@ -0,0 +1,71 @@ +# Problem: N-Queens II +# https://leetcode.com/problems/n-queens-ii/ + +# ===== Problem Info ===== +id = "0052" +slug = "0052_n_queens_ii" +title = "N-Queens II" +leetcode_id = 52 +url = "https://leetcode.com/problems/n-queens-ii/" + +# ===== LeetCode Official Metadata ===== +difficulty = "hard" +topics = ["backtracking"] +companies = ["google", "amazon", "meta"] + +# ===== Roadmaps ===== +roadmaps = [] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_n_queens"] +families = ["backtracking_combinatorial"] +data_structures = ["array", "hash_set"] +algorithms = ["backtracking"] +related_problems = ["0051"] + +# ===== File Locations ===== +[files] +solution = "solutions/0052_n_queens_ii.py" +generator = "generators/0052_n_queens_ii.py" +tests_dir = "tests/0052_n_queens_ii/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "SolutionSets" +method = "totalNQueens" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_n_queens"] +families = ["backtracking_combinatorial"] +data_structures = ["array", "hash_set"] +algorithms = ["backtracking"] +related_problems = ["0051"] + +role = "variant" +variant = "count_only" +based_on = ["0051#default"] +delta = "Only count solutions instead of building board representations. More memory efficient." +complexity = "O(n!) 
time, O(n) space" +notes = "Same algorithm as N-Queens but optimized for counting. Uses hash sets for O(1) constraint checking." + +[[solutions]] +key = "bitmask" +class = "SolutionBitmask" +method = "totalNQueens" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_n_queens"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0051"] + +role = "variant" +variant = "bitmask_optimization" +based_on = ["0052#default"] +delta = "Use integers as bitmasks for ultra-fast constraint checking. Bitwise operations are faster than hash lookups." +complexity = "O(n!) time, O(n) space" +notes = "Optimized version using bitmasks for constraint tracking. Better cache locality and smaller constants." + diff --git a/meta/problems/0077_combinations.toml b/meta/problems/0077_combinations.toml new file mode 100644 index 0000000..80baab5 --- /dev/null +++ b/meta/problems/0077_combinations.toml @@ -0,0 +1,52 @@ +# Problem: Combinations +# https://leetcode.com/problems/combinations/ + +# ===== Problem Info ===== +id = "0077" +slug = "0077_combinations" +title = "Combinations" +leetcode_id = 77 +url = "https://leetcode.com/problems/combinations/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["backtracking"] +companies = ["google", "amazon", "meta"] + +# ===== Roadmaps ===== +roadmaps = [] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0078", "0039", "0216"] + +# ===== File Locations ===== +[files] +solution = "solutions/0077_combinations.py" +generator = "generators/0077_combinations.py" +tests_dir = "tests/0077_combinations/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "combine" + +api_kernels = 
["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0078", "0039"] + +role = "variant" +variant = "fixed_size_subset" +based_on = ["0078#default"] +delta = "Fixed-size subset enumeration. Only collect when path length equals k. Add count-based pruning: stop early if not enough elements remain." +complexity = "O(k × C(n,k)) time, O(k) space" +notes = "Similar to subsets but with fixed size constraint. Uses start_index for canonical ordering and pruning optimization." + diff --git a/meta/problems/0078_subsets.toml b/meta/problems/0078_subsets.toml new file mode 100644 index 0000000..4a45cc4 --- /dev/null +++ b/meta/problems/0078_subsets.toml @@ -0,0 +1,58 @@ +# Problem: Subsets +# https://leetcode.com/problems/subsets/ + +# ===== Problem Info ===== +id = "0078" +slug = "0078_subsets" +title = "Subsets" +leetcode_id = 78 +url = "https://leetcode.com/problems/subsets/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking", "bit_manipulation"] +companies = ["google", "amazon", "meta", "microsoft", "apple", "uber"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_subset"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0090", "0077", "0046"] + +# ===== Pattern Role ===== +[pattern_role] +is_base_template = true +base_for_kernel = "BacktrackingExploration" +derived_problems = ["0090", "0077"] + +# ===== File Locations ===== +[files] +solution = "solutions/0078_subsets.py" +generator = "generators/0078_subsets.py" +tests_dir = "tests/0078_subsets/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "subsets" + +api_kernels = ["BacktrackingExploration"] 
+patterns = ["backtracking_subset"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0090", "0077"] + +role = "base" +variant = "" +based_on = [] +delta = "" +complexity = "O(n × 2^n) time, O(n) space" +notes = "BASE TEMPLATE for subset enumeration. Use start_index for canonical ordering. Every node (including empty) is a valid subset. Key difference from permutations: no 'used' array needed, collect at every node." + diff --git a/meta/problems/0079_word_search.toml b/meta/problems/0079_word_search.toml new file mode 100644 index 0000000..811060b --- /dev/null +++ b/meta/problems/0079_word_search.toml @@ -0,0 +1,52 @@ +# Problem: Word Search +# https://leetcode.com/problems/word-search/ + +# ===== Problem Info ===== +id = "0079" +slug = "0079_word_search" +title = "Word Search" +leetcode_id = 79 +url = "https://leetcode.com/problems/word-search/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking", "matrix"] +companies = ["google", "amazon", "meta", "microsoft", "apple", "uber", "bloomberg"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150", "blind_75"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_grid_path"] +families = ["backtracking_combinatorial"] +data_structures = ["grid", "array"] +algorithms = ["backtracking", "dfs"] +related_problems = ["0212", "0130", "0200"] + +# ===== File Locations ===== +[files] +solution = "solutions/0079_word_search.py" +generator = "generators/0079_word_search.py" +tests_dir = "tests/0079_word_search/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "exist" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_grid_path"] +families = ["backtracking_combinatorial"] +data_structures = ["grid", "array"] +algorithms = ["backtracking", "dfs"] +related_problems = ["0212"] + +role = 
"base" +variant = "grid_dfs_visited_marking" +based_on = [] +delta = "" +complexity = "O(m × n × 4^L) time, O(L) space" +notes = "Grid DFS backtracking with in-place visited marking. Mark cells as visited during exploration, unmark on backtrack. Start DFS from each cell matching word[0]." + diff --git a/meta/problems/0090_subsets_ii.toml b/meta/problems/0090_subsets_ii.toml new file mode 100644 index 0000000..c8ee4df --- /dev/null +++ b/meta/problems/0090_subsets_ii.toml @@ -0,0 +1,52 @@ +# Problem: Subsets II +# https://leetcode.com/problems/subsets-ii/ + +# ===== Problem Info ===== +id = "0090" +slug = "0090_subsets_ii" +title = "Subsets II" +leetcode_id = 90 +url = "https://leetcode.com/problems/subsets-ii/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking", "bit_manipulation"] +companies = ["google", "amazon", "meta", "microsoft"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_subset"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0078", "0040", "0047"] + +# ===== File Locations ===== +[files] +solution = "solutions/0090_subsets_ii.py" +generator = "generators/0090_subsets_ii.py" +tests_dir = "tests/0090_subsets_ii/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "subsetsWithDup" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_subset"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0078", "0040"] + +role = "variant" +variant = "with_duplicates" +based_on = ["0078#default"] +delta = "Sort array + same-level deduplication: skip nums[i] if i > start_index and nums[i] == nums[i-1]. Ensures each unique subset is generated exactly once." 
+complexity = "O(n × 2^n) time, O(n) space" +notes = "Extension of base subset pattern with duplicate handling. Same deduplication technique as Combination Sum II and Permutations II." + diff --git a/meta/problems/0093_restore_ip_addresses.toml b/meta/problems/0093_restore_ip_addresses.toml new file mode 100644 index 0000000..3091c80 --- /dev/null +++ b/meta/problems/0093_restore_ip_addresses.toml @@ -0,0 +1,52 @@ +# Problem: Restore IP Addresses
# https://leetcode.com/problems/restore-ip-addresses/ + +# ===== Problem Info ===== +id = "0093" +slug = "0093_restore_ip_addresses" +title = "Restore IP Addresses" +leetcode_id = 93 +url = "https://leetcode.com/problems/restore-ip-addresses/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["string", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft"] + +# ===== Roadmaps ===== +roadmaps = [] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_string_segmentation"] +families = ["backtracking_combinatorial"] +data_structures = ["string", "array"] +algorithms = ["backtracking"] +related_problems = ["0131"] + +# ===== File Locations ===== +[files] +solution = "solutions/0093_restore_ip_addresses.py" +generator = "generators/0093_restore_ip_addresses.py" +tests_dir = "tests/0093_restore_ip_addresses/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "restoreIpAddresses" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_string_segmentation"] +families = ["backtracking_combinatorial"] +data_structures = ["string", "array"] +algorithms = ["backtracking"] +related_problems = ["0131"] + +role = "base" +variant = "multi_constraint_segmentation" +based_on = [] +delta = "" +complexity = "O(3^4 × n) = O(n) time, O(1) space" +notes = "String segmentation with multi-constraint validity. 
IP address has exactly 4 segments, each with constraints: length 1-3, value 0-255, no leading zeros. Prune aggressively based on remaining length bounds." + diff --git a/meta/problems/0131_palindrome_partitioning.toml b/meta/problems/0131_palindrome_partitioning.toml new file mode 100644 index 0000000..f32ae70 --- /dev/null +++ b/meta/problems/0131_palindrome_partitioning.toml @@ -0,0 +1,71 @@ +# Problem: Palindrome Partitioning +# https://leetcode.com/problems/palindrome-partitioning/ + +# ===== Problem Info ===== +id = "0131" +slug = "0131_palindrome_partitioning" +title = "Palindrome Partitioning" +leetcode_id = 131 +url = "https://leetcode.com/problems/palindrome-partitioning/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["string", "dynamic_programming", "backtracking"] +companies = ["google", "amazon", "meta", "microsoft", "apple"] + +# ===== Roadmaps ===== +roadmaps = ["neetcode_150"] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_string_segmentation"] +families = ["backtracking_combinatorial"] +data_structures = ["string", "array"] +algorithms = ["backtracking", "dynamic_programming"] +related_problems = ["0093", "0005"] + +# ===== File Locations ===== +[files] +solution = "solutions/0131_palindrome_partitioning.py" +generator = "generators/0131_palindrome_partitioning.py" +tests_dir = "tests/0131_palindrome_partitioning/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "SolutionDP" +method = "partition" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_string_segmentation"] +families = ["backtracking_combinatorial"] +data_structures = ["string", "array"] +algorithms = ["backtracking", "dynamic_programming"] +related_problems = ["0093"] + +role = "base" +variant = "dp_precomputed_palindrome" +based_on = [] +delta = "" +complexity = "O(n × 2^n) time, O(n^2) space" +notes = "Backtracking with DP-precomputed palindrome 
table. Precompute is_palindrome[i][j] for O(1) checks. At each position, try all valid (palindrome) prefixes. O(n^2) preprocessing dominated by O(n × 2^n) backtracking." + +[[solutions]] +key = "naive" +class = "SolutionNaive" +method = "partition" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_string_segmentation"] +families = ["backtracking_combinatorial"] +data_structures = ["string", "array"] +algorithms = ["backtracking"] +related_problems = ["0093"] + +role = "variant" +variant = "on_the_fly_checking" +based_on = ["0131#default"] +delta = "Check palindrome during backtracking (no preprocessing). Simpler code but slower for repeated checks." +complexity = "O(n × 2^n × n) time, O(n) space" +notes = "Alternative approach with on-the-fly palindrome checking. Demonstrates trade-off between preprocessing and runtime checking." + diff --git a/meta/problems/0216_combination_sum_iii.toml b/meta/problems/0216_combination_sum_iii.toml new file mode 100644 index 0000000..ea98d27 --- /dev/null +++ b/meta/problems/0216_combination_sum_iii.toml @@ -0,0 +1,52 @@ +# Problem: Combination Sum III +# https://leetcode.com/problems/combination-sum-iii/ + +# ===== Problem Info ===== +id = "0216" +slug = "0216_combination_sum_iii" +title = "Combination Sum III" +leetcode_id = 216 +url = "https://leetcode.com/problems/combination-sum-iii/" + +# ===== LeetCode Official Metadata ===== +difficulty = "medium" +topics = ["array", "backtracking"] +companies = ["google", "amazon"] + +# ===== Roadmaps ===== +roadmaps = [] + +# ===== Ontology Tags (Problem Level) ===== +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0039", "0040", "0077"] + +# ===== File Locations ===== +[files] +solution = "solutions/0216_combination_sum_iii.py" +generator = "generators/0216_combination_sum_iii.py" +tests_dir = 
"tests/0216_combination_sum_iii/" + +# ===== Solutions ===== +[[solutions]] +key = "default" +class = "Solution" +method = "combinationSum3" + +api_kernels = ["BacktrackingExploration"] +patterns = ["backtracking_combination"] +families = ["backtracking_combinatorial"] +data_structures = ["array"] +algorithms = ["backtracking"] +related_problems = ["0039", "0040"] + +role = "variant" +variant = "fixed_count_bounded_range" +based_on = ["0040#default"] +delta = "Dual constraint: fixed count k and fixed sum n. Fixed range [1-9] with all distinct (no duplicates to handle). Prune on both count and sum dimensions." +complexity = "O(C(9,k) × k) time, O(k) space" +notes = "Combines fixed-size constraint (like Combinations) with target sum constraint (like Combination Sum). Simpler than Combination Sum II due to no duplicates." + diff --git a/ontology/families.toml b/ontology/families.toml index e31454f..8367c0d 100644 --- a/ontology/families.toml +++ b/ontology/families.toml @@ -24,6 +24,22 @@ summary = "Problems where answer is found by binary searching a value space." id = "backtracking_combinatorial" summary = "Problems generating permutations, combinations, or placements." +[[families]] +id = "combination_sum" +summary = "Problems finding combinations that sum to a target value." + +[[families]] +id = "string_segmentation" +summary = "Problems partitioning strings into valid segments (IP, palindromes)." + +[[families]] +id = "grid_path_search" +summary = "Problems finding paths in 2D grids using DFS/backtracking." + +[[families]] +id = "constraint_satisfaction" +summary = "Problems like N-Queens requiring constraint checking during search." + [[families]] id = "linked_list_manipulation" summary = "Problems involving in-place linked list operations." 
diff --git a/ontology/patterns.toml b/ontology/patterns.toml index ff154f4..dd22509 100644 --- a/ontology/patterns.toml +++ b/ontology/patterns.toml @@ -122,6 +122,36 @@ id = "backtracking_sudoku" api_kernel = "BacktrackingExploration" summary = "Fill sudoku grid with constraint propagation." +[[patterns]] +id = "backtracking_combination_sum" +api_kernel = "BacktrackingExploration" +summary = "Find combinations that sum to target, with or without reuse." + +[[patterns]] +id = "backtracking_combination_dedup" +api_kernel = "BacktrackingExploration" +summary = "Combinations with duplicate handling via same-level skip." + +[[patterns]] +id = "backtracking_permutation_dedup" +api_kernel = "BacktrackingExploration" +summary = "Unique permutations with sorting and same-level deduplication." + +[[patterns]] +id = "backtracking_subset_dedup" +api_kernel = "BacktrackingExploration" +summary = "Unique subsets with sorting and same-level deduplication." + +[[patterns]] +id = "backtracking_string_segmentation" +api_kernel = "BacktrackingExploration" +summary = "Partition string into valid segments (IP, palindromes)." + +[[patterns]] +id = "backtracking_grid_path" +api_kernel = "BacktrackingExploration" +summary = "DFS path search in grid with visited marking." 
+ # === Linked List Patterns === [[patterns]] id = "linked_list_k_group_reversal" diff --git a/solutions/0003_longest_substring_without_repeating_characters.py b/solutions/0003_longest_substring_without_repeating_characters.py index 34dd53f..f119139 100644 --- a/solutions/0003_longest_substring_without_repeating_characters.py +++ b/solutions/0003_longest_substring_without_repeating_characters.py @@ -128,7 +128,6 @@ def _brute_force(s: str) -> int: # - Uses last-seen-index array for O(1) duplicate detection # - Direct position jumping instead of incremental contraction # ============================================================================ - class Solution: """ Optimal solution using sliding window with jump optimization. @@ -180,7 +179,6 @@ def lengthOfLongestSubstring(self, s: str) -> int: # - Same sliding window approach with dictionary instead of array # - More flexible for Unicode strings but slightly slower # ============================================================================ - class SolutionDict: """ Alternative implementation using dictionary for last-seen index. @@ -210,7 +208,6 @@ def lengthOfLongestSubstring(self, s: str) -> int: # - Uses set to track current window characters # - Demonstrates standard while-loop contraction pattern # ============================================================================ - class SolutionWithSet: """ Alternative implementation using a set for the current window. diff --git a/solutions/0039_combination_sum.py b/solutions/0039_combination_sum.py index 0d6d432..589c89c 100644 --- a/solutions/0039_combination_sum.py +++ b/solutions/0039_combination_sum.py @@ -10,11 +10,60 @@ Sub-Pattern: Target search with element reuse Key Insight: Allow reuse by NOT incrementing start_index when recursing. Prune branches where remaining target < 0 or current element > remaining. 
+ +Constraints: +- 1 <= candidates.length <= 30 +- 2 <= candidates[i] <= 40 +- All elements of candidates are distinct +- 1 <= target <= 40 """ from typing import List from _runner import get_solver +# ============================================================================ +# JUDGE_FUNC - Custom validation for combination problems +# ============================================================================ +def judge(actual: List[List[int]], expected, input_data: str) -> bool: + """ + Validate Combination Sum results. + + Checks: + 1. Each combination sums to target + 2. Each combination uses only candidates (with reuse allowed) + 3. No duplicate combinations + 4. Correct number of combinations + """ + lines = input_data.strip().split('\n') + candidates = list(map(int, lines[0].split(','))) + target = int(lines[1]) + candidates_set = set(candidates) + + # Validate each combination + for combo in actual: + # Check sum equals target + if sum(combo) != target: + return False + # Check all elements are from candidates + for num in combo: + if num not in candidates_set: + return False + + # Check no duplicates (sort each combo for comparison) + sorted_combos = [tuple(sorted(c)) for c in actual] + if len(set(sorted_combos)) != len(actual): + return False + + # Compare count with expected if available + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + SOLUTIONS = { "default": { "class": "Solution", @@ -25,6 +74,13 @@ } +# ============================================================================ +# Solution 1: Backtracking with Element Reuse +# Time: O(n^(t/m)), Space: O(t/m) +# - Track remaining target; allow reuse by recursing with same index +# - Prune when candidate > remaining (sorted array optimization) +# - t = target, m = min(candidates), n = len(candidates) +# ============================================================================ class Solution: def combinationSum(self, candidates: 
List[int], target: int) -> List[List[int]]: """ diff --git a/solutions/0040_combination_sum_ii.py b/solutions/0040_combination_sum_ii.py index 69a55e0..8b0016d 100644 --- a/solutions/0040_combination_sum_ii.py +++ b/solutions/0040_combination_sum_ii.py @@ -14,11 +14,59 @@ Delta from Combination Sum (LeetCode 39): - No reuse: recurse with i+1 instead of i - Handle duplicates: sort + same-level skip + +Constraints: +- 1 <= candidates.length <= 100 +- 1 <= candidates[i] <= 50 +- 1 <= target <= 30 """ from typing import List from _runner import get_solver +# ============================================================================ +# JUDGE_FUNC - Custom validation for combination problems +# ============================================================================ +def judge(actual: List[List[int]], expected, input_data: str) -> bool: + """ + Validate Combination Sum II results. + + Checks: + 1. Each combination sums to target + 2. Each element used at most as many times as it appears in candidates + 3. 
No duplicate combinations + """ + lines = input_data.strip().split('\n') + candidates = list(map(int, lines[0].split(','))) + target = int(lines[1]) + + from collections import Counter + candidates_count = Counter(candidates) + + for combo in actual: + # Check sum equals target + if sum(combo) != target: + return False + # Check element usage doesn't exceed availability + combo_count = Counter(combo) + for num, cnt in combo_count.items(): + if cnt > candidates_count.get(num, 0): + return False + + # Check no duplicate combinations + sorted_combos = [tuple(sorted(c)) for c in actual] + if len(set(sorted_combos)) != len(actual): + return False + + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + SOLUTIONS = { "default": { "class": "Solution", @@ -29,6 +77,13 @@ } +# ============================================================================ +# Solution 1: Backtracking with No-Reuse and Deduplication +# Time: O(2^n), Space: O(n) +# - Sort to bring duplicates together for deduplication +# - No reuse: recurse with i+1 (each element used at most once) +# - Same-level dedup: skip if i > start and candidates[i] == candidates[i-1] +# ============================================================================ class Solution: def combinationSum2(self, candidates: List[int], target: int) -> List[List[int]]: """ diff --git a/solutions/0046_permutations.py b/solutions/0046_permutations.py index 25b45dd..b90e4b0 100644 --- a/solutions/0046_permutations.py +++ b/solutions/0046_permutations.py @@ -11,9 +11,52 @@ This is the BASE TEMPLATE for the BacktrackingExploration API Kernel's permutation sub-pattern. 
+ +Constraints: +- 1 <= nums.length <= 6 +- -10 <= nums[i] <= 10 +- All the integers of nums are unique """ from typing import List from _runner import get_solver +import math + + +# ============================================================================ +# JUDGE_FUNC - Custom validation for permutation problems +# ============================================================================ +def judge(actual: List[List[int]], expected, input_data: str) -> bool: + """ + Validate Permutations results. + + Checks: + 1. Each result is a valid permutation (same elements as input) + 2. No duplicate permutations + 3. Correct count (n! permutations) + """ + nums = list(map(int, input_data.strip().split(','))) + n = len(nums) + nums_sorted = sorted(nums) + + # Each permutation should have same elements as input + for perm in actual: + if sorted(perm) != nums_sorted: + return False + + # Check no duplicates + perm_tuples = [tuple(p) for p in actual] + if len(set(perm_tuples)) != len(actual): + return False + + # Check correct count + expected_count = math.factorial(n) + if len(actual) != expected_count: + return False + + return True + + +JUDGE_FUNC = judge SOLUTIONS = { @@ -26,6 +69,13 @@ } +# ============================================================================ +# Solution 1: Backtracking with Used Array +# Time: O(n! × n), Space: O(n) +# - Build permutation position by position +# - At each position, try every unused element +# - Track usage with boolean array; n! 
permutations × O(n) copy each +# ============================================================================ class Solution: def permute(self, nums: List[int]) -> List[List[int]]: """ diff --git a/solutions/0047_permutations_ii.py b/solutions/0047_permutations_ii.py index 5342c52..c5553ed 100644 --- a/solutions/0047_permutations_ii.py +++ b/solutions/0047_permutations_ii.py @@ -14,9 +14,55 @@ Delta from Base (LeetCode 46): - Sort the input array to bring duplicates together - Add deduplication: skip nums[i] if it equals nums[i-1] and !used[i-1] + +Constraints: +- 1 <= nums.length <= 8 +- -10 <= nums[i] <= 10 """ from typing import List from _runner import get_solver +from collections import Counter +import math + + +# ============================================================================ +# JUDGE_FUNC - Custom validation for permutation problems with duplicates +# ============================================================================ +def judge(actual: List[List[int]], expected, input_data: str) -> bool: + """ + Validate Permutations II results. + + Checks: + 1. Each result is a valid permutation (same multiset as input) + 2. No duplicate permutations + 3. Correct count (n! / (k1! * k2! * ...)) + """ + nums = list(map(int, input_data.strip().split(','))) + n = len(nums) + nums_count = Counter(nums) + + # Each permutation should have same elements as input + for perm in actual: + if Counter(perm) != nums_count: + return False + + # Check no duplicates + perm_tuples = [tuple(p) for p in actual] + if len(set(perm_tuples)) != len(actual): + return False + + # Calculate expected unique permutations: n! / (k1! * k2! * ...) 
+ expected_count = math.factorial(n) + for cnt in nums_count.values(): + expected_count //= math.factorial(cnt) + + if len(actual) != expected_count: + return False + + return True + + +JUDGE_FUNC = judge SOLUTIONS = { @@ -29,6 +75,13 @@ } +# ============================================================================ +# Solution 1: Backtracking with Sorting and Same-Level Deduplication +# Time: O(n! × n), Space: O(n) +# - Sort to bring duplicates together +# - Skip duplicate if previous identical element is unused (same level) +# - Ensures leftmost duplicate is always picked first at each level +# ============================================================================ class Solution: def permuteUnique(self, nums: List[int]) -> List[List[int]]: """ diff --git a/solutions/0052_n_queens_ii.py b/solutions/0052_n_queens_ii.py index af852b8..ffbc212 100644 --- a/solutions/0052_n_queens_ii.py +++ b/solutions/0052_n_queens_ii.py @@ -33,6 +33,30 @@ } +# Known solution counts for N-Queens +KNOWN_COUNTS = {1: 1, 2: 0, 3: 0, 4: 2, 5: 10, 6: 4, 7: 40, 8: 92, 9: 352} + + +# ============================================================================ +# JUDGE_FUNC - Validate N-Queens II count +# ============================================================================ +def judge(actual: int, expected, input_data: str) -> bool: + """Validate N-Queens II result against known counts.""" + n = int(input_data.strip()) + if expected is not None: + return actual == expected + return actual == KNOWN_COUNTS.get(n, actual) + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Hash Sets +# Time: O(n!), Space: O(n) +# - Place queens row by row; track column/diagonal constraints with sets +# - O(1) conflict checking using hash sets +# ============================================================================ class SolutionSets: def totalNQueens(self, n: int) -> int: """ @@ -109,6 +133,12 @@ def 
backtrack(row: int) -> None: return count +# ============================================================================ +# Solution 2: Backtracking with Bitmask +# Time: O(n!), Space: O(n) +# - Use integers as bitmasks for ultra-fast constraint checking +# - Bitwise operations are faster than hash lookups +# ============================================================================ class SolutionBitmask: def totalNQueens(self, n: int) -> int: """ diff --git a/solutions/0077_combinations.py b/solutions/0077_combinations.py index 812dd81..e013fd4 100644 --- a/solutions/0077_combinations.py +++ b/solutions/0077_combinations.py @@ -28,6 +28,49 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate combinations +# ============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Combinations results.""" + import math + lines = input_data.strip().split('\n') + n = int(lines[0]) + k = int(lines[1]) + + # Check each combination has k elements from [1, n] + for combo in actual: + if len(combo) != k: + return False + if len(set(combo)) != k: # no duplicates + return False + for num in combo: + if num < 1 or num > n: + return False + + # Check no duplicate combinations + sorted_combos = [tuple(sorted(c)) for c in actual] + if len(set(sorted_combos)) != len(actual): + return False + + # Check correct count: C(n,k) + expected_count = math.comb(n, k) + if len(actual) != expected_count: + return False + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Count Pruning +# Time: O(k × C(n,k)), Space: O(k) +# - Use start_index for canonical ordering +# - Only collect when path has exactly k elements +# - Prune when remaining elements < elements needed +# ============================================================================ class Solution: 
def combine(self, n: int, k: int) -> List[List[int]]: """ diff --git a/solutions/0078_subsets.py b/solutions/0078_subsets.py index 4a01f7c..c174803 100644 --- a/solutions/0078_subsets.py +++ b/solutions/0078_subsets.py @@ -26,6 +26,43 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate subsets +# ============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Subsets results.""" + nums = list(map(int, input_data.strip().split(','))) + n = len(nums) + nums_set = set(nums) + + # Each subset should only contain elements from nums + for subset in actual: + for num in subset: + if num not in nums_set: + return False + + # Check no duplicate subsets + sorted_subsets = [tuple(sorted(s)) for s in actual] + if len(set(sorted_subsets)) != len(actual): + return False + + # Check correct count: 2^n subsets + if len(actual) != (1 << n): + return False + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Start-Index Canonicalization +# Time: O(n × 2^n), Space: O(n) +# - Use start_index to enforce canonical ordering +# - Collect at every node (not just leaves) +# - 2^n subsets, O(n) to copy each +# ============================================================================ class Solution: def subsets(self, nums: List[int]) -> List[List[int]]: """ diff --git a/solutions/0079_word_search.py b/solutions/0079_word_search.py index f0afa3d..95f1019 100644 --- a/solutions/0079_word_search.py +++ b/solutions/0079_word_search.py @@ -28,6 +28,13 @@ } +# ============================================================================ +# Solution 1: DFS Backtracking with In-Place Visited Marking +# Time: O(m × n × 4^L), Space: O(L) +# - Start DFS from each cell matching word[0] +# - Mark visited cells in-place (temporarily change to '#') +# - Try all 4 
directions; unmark on backtrack +# ============================================================================ class Solution: def exist(self, board: List[List[str]], word: str) -> bool: """ diff --git a/solutions/0090_subsets_ii.py b/solutions/0090_subsets_ii.py index adfe204..da5fd4b 100644 --- a/solutions/0090_subsets_ii.py +++ b/solutions/0090_subsets_ii.py @@ -28,6 +28,43 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate subsets with duplicates +# ============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Subsets II results.""" + from collections import Counter + nums = list(map(int, input_data.strip().split(','))) + nums_count = Counter(nums) + + # Each subset should only contain elements from nums (respecting counts) + for subset in actual: + subset_count = Counter(subset) + for num, cnt in subset_count.items(): + if cnt > nums_count.get(num, 0): + return False + + # Check no duplicate subsets + sorted_subsets = [tuple(sorted(s)) for s in actual] + if len(set(sorted_subsets)) != len(actual): + return False + + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Sorting and Same-Level Deduplication +# Time: O(n × 2^n), Space: O(n) +# - Sort to bring duplicates together +# - Skip if i > start_index and nums[i] == nums[i-1] +# - Ensures each unique subset is generated exactly once +# ============================================================================ class Solution: def subsetsWithDup(self, nums: List[int]) -> List[List[int]]: """ diff --git a/solutions/0093_restore_ip_addresses.py b/solutions/0093_restore_ip_addresses.py index 187d7ea..dc20da1 100644 --- a/solutions/0093_restore_ip_addresses.py +++ b/solutions/0093_restore_ip_addresses.py @@ 
-28,6 +28,54 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate IP addresses +# ============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Restore IP Addresses results.""" + s = input_data.strip() + + def is_valid_ip(ip: str) -> bool: + """Check if ip is valid and uses all characters from s.""" + parts = ip.split('.') + if len(parts) != 4: + return False + reconstructed = ''.join(parts) + if reconstructed != s: + return False + for part in parts: + if not part: + return False + if len(part) > 1 and part[0] == '0': + return False + if int(part) > 255: + return False + return True + + for ip in actual: + if not is_valid_ip(ip): + return False + + # Check no duplicates + if len(set(actual)) != len(actual): + return False + + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Segment Validation +# Time: O(3^4 × n) = O(n), Space: O(1) +# - Exactly 4 segments; try 1, 2, or 3 chars per segment +# - Validate: no leading zeros, value 0-255 +# - Prune based on remaining length bounds +# ============================================================================ class Solution: def restoreIpAddresses(self, s: str) -> List[str]: """ diff --git a/solutions/0131_palindrome_partitioning.py b/solutions/0131_palindrome_partitioning.py index 9c2efd5..894fb0a 100644 --- a/solutions/0131_palindrome_partitioning.py +++ b/solutions/0131_palindrome_partitioning.py @@ -33,6 +33,46 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate palindrome partitions +# ============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Palindrome 
Partitioning results.""" + s = input_data.strip() + + def is_palindrome(t: str) -> bool: + return t == t[::-1] + + for partition in actual: + # Check partition reconstructs original string + if ''.join(partition) != s: + return False + # Check each part is a palindrome + for part in partition: + if not is_palindrome(part): + return False + + # Check no duplicate partitions + partition_tuples = [tuple(p) for p in actual] + if len(set(partition_tuples)) != len(actual): + return False + + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with DP-Precomputed Palindrome Table +# Time: O(n × 2^n), Space: O(n^2) +# - Precompute is_palindrome[i][j] for O(1) checks +# - At each position, try all valid (palindrome) prefixes +# - O(n^2) preprocessing dominated by O(n × 2^n) backtracking +# ============================================================================ class SolutionDP: def partition(self, s: str) -> List[List[str]]: """ @@ -104,6 +144,12 @@ def backtrack(start: int) -> None: return results +# ============================================================================ +# Solution 2: Backtracking with On-the-Fly Checking +# Time: O(n × 2^n × n), Space: O(n) +# - Check palindrome during backtracking (no preprocessing) +# - Simpler code but slower for repeated checks +# ============================================================================ class SolutionNaive: def partition(self, s: str) -> List[List[str]]: """ diff --git a/solutions/0216_combination_sum_iii.py b/solutions/0216_combination_sum_iii.py index 7df0738..48107df 100644 --- a/solutions/0216_combination_sum_iii.py +++ b/solutions/0216_combination_sum_iii.py @@ -30,6 +30,50 @@ } +# ============================================================================ +# JUDGE_FUNC - Validate Combination Sum III +# 
============================================================================ +def judge(actual, expected, input_data: str) -> bool: + """Validate Combination Sum III results.""" + lines = input_data.strip().split('\n') + k = int(lines[0]) + n = int(lines[1]) + + for combo in actual: + # Check exactly k numbers + if len(combo) != k: + return False + # Check sum equals n + if sum(combo) != n: + return False + # Check all numbers in [1,9] and unique + if len(set(combo)) != k: + return False + for num in combo: + if num < 1 or num > 9: + return False + + # Check no duplicate combinations + sorted_combos = [tuple(sorted(c)) for c in actual] + if len(set(sorted_combos)) != len(actual): + return False + + if expected is not None: + return len(actual) == len(expected) + + return True + + +JUDGE_FUNC = judge + + +# ============================================================================ +# Solution 1: Backtracking with Dual Constraint Pruning +# Time: O(C(9,k) × k), Space: O(k) +# - Fixed size k and fixed sum n constraints +# - Range [1-9]: all distinct, no duplicates +# - Prune on both count and sum dimensions +# ============================================================================ class Solution: def combinationSum3(self, k: int, n: int) -> List[List[int]]: """ diff --git a/tests/0039_combination_sum_1.in b/tests/0039_combination_sum_1.in new file mode 100644 index 0000000..fb2852c --- /dev/null +++ b/tests/0039_combination_sum_1.in @@ -0,0 +1,2 @@ +2,3,6,7 +7 diff --git a/tests/0039_combination_sum_1.out b/tests/0039_combination_sum_1.out new file mode 100644 index 0000000..799ab26 --- /dev/null +++ b/tests/0039_combination_sum_1.out @@ -0,0 +1 @@ +[[2, 2, 3], [7]] diff --git a/tests/0039_combination_sum_2.in b/tests/0039_combination_sum_2.in new file mode 100644 index 0000000..c4813ad --- /dev/null +++ b/tests/0039_combination_sum_2.in @@ -0,0 +1,2 @@ +2,3,5 +8 diff --git a/tests/0039_combination_sum_2.out b/tests/0039_combination_sum_2.out new file mode 
100644 index 0000000..ead6098 --- /dev/null +++ b/tests/0039_combination_sum_2.out @@ -0,0 +1 @@ +[[2, 2, 2, 2], [2, 3, 3], [3, 5]] diff --git a/tests/0040_combination_sum_ii_1.in b/tests/0040_combination_sum_ii_1.in new file mode 100644 index 0000000..392c5b3 --- /dev/null +++ b/tests/0040_combination_sum_ii_1.in @@ -0,0 +1,2 @@ +10,1,2,7,6,1,5 +8 diff --git a/tests/0040_combination_sum_ii_1.out b/tests/0040_combination_sum_ii_1.out new file mode 100644 index 0000000..43a8adc --- /dev/null +++ b/tests/0040_combination_sum_ii_1.out @@ -0,0 +1 @@ +[[1, 1, 6], [1, 2, 5], [1, 7], [2, 6]] diff --git a/tests/0040_combination_sum_ii_2.in b/tests/0040_combination_sum_ii_2.in new file mode 100644 index 0000000..07069ec --- /dev/null +++ b/tests/0040_combination_sum_ii_2.in @@ -0,0 +1,2 @@ +2,5,2,1,2 +5 diff --git a/tests/0040_combination_sum_ii_2.out b/tests/0040_combination_sum_ii_2.out new file mode 100644 index 0000000..acb80a6 --- /dev/null +++ b/tests/0040_combination_sum_ii_2.out @@ -0,0 +1 @@ +[[1, 2, 2], [5]] diff --git a/tests/0046_permutations_1.in b/tests/0046_permutations_1.in new file mode 100644 index 0000000..b0246d5 --- /dev/null +++ b/tests/0046_permutations_1.in @@ -0,0 +1 @@ +1,2,3 diff --git a/tests/0046_permutations_1.out b/tests/0046_permutations_1.out new file mode 100644 index 0000000..145889e --- /dev/null +++ b/tests/0046_permutations_1.out @@ -0,0 +1 @@ +[[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]] diff --git a/tests/0046_permutations_2.in b/tests/0046_permutations_2.in new file mode 100644 index 0000000..93c12d9 --- /dev/null +++ b/tests/0046_permutations_2.in @@ -0,0 +1 @@ +0,1 diff --git a/tests/0046_permutations_2.out b/tests/0046_permutations_2.out new file mode 100644 index 0000000..c777460 --- /dev/null +++ b/tests/0046_permutations_2.out @@ -0,0 +1 @@ +[[0, 1], [1, 0]] diff --git a/tests/0047_permutations_ii_1.in b/tests/0047_permutations_ii_1.in new file mode 100644 index 0000000..f9dd88d --- /dev/null +++ 
b/tests/0047_permutations_ii_1.in @@ -0,0 +1 @@ +1,1,2 diff --git a/tests/0047_permutations_ii_1.out b/tests/0047_permutations_ii_1.out new file mode 100644 index 0000000..c907747 --- /dev/null +++ b/tests/0047_permutations_ii_1.out @@ -0,0 +1 @@ +[[1, 1, 2], [1, 2, 1], [2, 1, 1]] diff --git a/tests/0047_permutations_ii_2.in b/tests/0047_permutations_ii_2.in new file mode 100644 index 0000000..b0246d5 --- /dev/null +++ b/tests/0047_permutations_ii_2.in @@ -0,0 +1 @@ +1,2,3 diff --git a/tests/0047_permutations_ii_2.out b/tests/0047_permutations_ii_2.out new file mode 100644 index 0000000..145889e --- /dev/null +++ b/tests/0047_permutations_ii_2.out @@ -0,0 +1 @@ +[[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]] diff --git a/tests/0052_n_queens_ii_1.in b/tests/0052_n_queens_ii_1.in new file mode 100644 index 0000000..b8626c4 --- /dev/null +++ b/tests/0052_n_queens_ii_1.in @@ -0,0 +1 @@ +4 diff --git a/tests/0052_n_queens_ii_1.out b/tests/0052_n_queens_ii_1.out new file mode 100644 index 0000000..0cfbf08 --- /dev/null +++ b/tests/0052_n_queens_ii_1.out @@ -0,0 +1 @@ +2 diff --git a/tests/0052_n_queens_ii_2.in b/tests/0052_n_queens_ii_2.in new file mode 100644 index 0000000..d00491f --- /dev/null +++ b/tests/0052_n_queens_ii_2.in @@ -0,0 +1 @@ +1 diff --git a/tests/0052_n_queens_ii_2.out b/tests/0052_n_queens_ii_2.out new file mode 100644 index 0000000..d00491f --- /dev/null +++ b/tests/0052_n_queens_ii_2.out @@ -0,0 +1 @@ +1 diff --git a/tests/0077_combinations_1.in b/tests/0077_combinations_1.in new file mode 100644 index 0000000..fc7fa75 --- /dev/null +++ b/tests/0077_combinations_1.in @@ -0,0 +1,2 @@ +4 +2 diff --git a/tests/0077_combinations_1.out b/tests/0077_combinations_1.out new file mode 100644 index 0000000..96ecbac --- /dev/null +++ b/tests/0077_combinations_1.out @@ -0,0 +1 @@ +[[1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]] diff --git a/tests/0077_combinations_2.in b/tests/0077_combinations_2.in new file mode 100644 index 
0000000..6ed281c --- /dev/null +++ b/tests/0077_combinations_2.in @@ -0,0 +1,2 @@ +1 +1 diff --git a/tests/0077_combinations_2.out b/tests/0077_combinations_2.out new file mode 100644 index 0000000..42feff4 --- /dev/null +++ b/tests/0077_combinations_2.out @@ -0,0 +1 @@ +[[1]] diff --git a/tests/0078_subsets_1.in b/tests/0078_subsets_1.in new file mode 100644 index 0000000..b0246d5 --- /dev/null +++ b/tests/0078_subsets_1.in @@ -0,0 +1 @@ +1,2,3 diff --git a/tests/0078_subsets_1.out b/tests/0078_subsets_1.out new file mode 100644 index 0000000..a1f01de --- /dev/null +++ b/tests/0078_subsets_1.out @@ -0,0 +1 @@ +[[], [1], [1, 2], [1, 2, 3], [1, 3], [2], [2, 3], [3]] diff --git a/tests/0078_subsets_2.in b/tests/0078_subsets_2.in new file mode 100644 index 0000000..573541a --- /dev/null +++ b/tests/0078_subsets_2.in @@ -0,0 +1 @@ +0 diff --git a/tests/0078_subsets_2.out b/tests/0078_subsets_2.out new file mode 100644 index 0000000..da65450 --- /dev/null +++ b/tests/0078_subsets_2.out @@ -0,0 +1 @@ +[[], [0]] diff --git a/tests/0079_word_search_1.in b/tests/0079_word_search_1.in new file mode 100644 index 0000000..f5b4475 --- /dev/null +++ b/tests/0079_word_search_1.in @@ -0,0 +1,5 @@ +3,4 +A,B,C,E +S,F,C,S +A,D,E,E +ABCCED diff --git a/tests/0079_word_search_1.out b/tests/0079_word_search_1.out new file mode 100644 index 0000000..0ca9514 --- /dev/null +++ b/tests/0079_word_search_1.out @@ -0,0 +1 @@ +True diff --git a/tests/0079_word_search_2.in b/tests/0079_word_search_2.in new file mode 100644 index 0000000..f729989 --- /dev/null +++ b/tests/0079_word_search_2.in @@ -0,0 +1,5 @@ +3,4 +A,B,C,E +S,F,C,S +A,D,E,E +SEE diff --git a/tests/0079_word_search_2.out b/tests/0079_word_search_2.out new file mode 100644 index 0000000..0ca9514 --- /dev/null +++ b/tests/0079_word_search_2.out @@ -0,0 +1 @@ +True diff --git a/tests/0079_word_search_3.in b/tests/0079_word_search_3.in new file mode 100644 index 0000000..2b8b5b4 --- /dev/null +++ b/tests/0079_word_search_3.in @@ -0,0 
+1,5 @@ +3,4 +A,B,C,E +S,F,C,S +A,D,E,E +ABCB diff --git a/tests/0079_word_search_3.out b/tests/0079_word_search_3.out new file mode 100644 index 0000000..bc59c12 --- /dev/null +++ b/tests/0079_word_search_3.out @@ -0,0 +1 @@ +False diff --git a/tests/0090_subsets_ii_1.in b/tests/0090_subsets_ii_1.in new file mode 100644 index 0000000..98aa988 --- /dev/null +++ b/tests/0090_subsets_ii_1.in @@ -0,0 +1 @@ +1,2,2 diff --git a/tests/0090_subsets_ii_1.out b/tests/0090_subsets_ii_1.out new file mode 100644 index 0000000..b967b36 --- /dev/null +++ b/tests/0090_subsets_ii_1.out @@ -0,0 +1 @@ +[[], [1], [1, 2], [1, 2, 2], [2], [2, 2]] diff --git a/tests/0090_subsets_ii_2.in b/tests/0090_subsets_ii_2.in new file mode 100644 index 0000000..573541a --- /dev/null +++ b/tests/0090_subsets_ii_2.in @@ -0,0 +1 @@ +0 diff --git a/tests/0090_subsets_ii_2.out b/tests/0090_subsets_ii_2.out new file mode 100644 index 0000000..da65450 --- /dev/null +++ b/tests/0090_subsets_ii_2.out @@ -0,0 +1 @@ +[[], [0]] diff --git a/tests/0093_restore_ip_addresses_1.in b/tests/0093_restore_ip_addresses_1.in new file mode 100644 index 0000000..9d21c87 --- /dev/null +++ b/tests/0093_restore_ip_addresses_1.in @@ -0,0 +1 @@ +25525511135 diff --git a/tests/0093_restore_ip_addresses_1.out b/tests/0093_restore_ip_addresses_1.out new file mode 100644 index 0000000..319d446 --- /dev/null +++ b/tests/0093_restore_ip_addresses_1.out @@ -0,0 +1 @@ +['255.255.11.135', '255.255.111.35'] diff --git a/tests/0093_restore_ip_addresses_2.in b/tests/0093_restore_ip_addresses_2.in new file mode 100644 index 0000000..739d797 --- /dev/null +++ b/tests/0093_restore_ip_addresses_2.in @@ -0,0 +1 @@ +0000 diff --git a/tests/0093_restore_ip_addresses_2.out b/tests/0093_restore_ip_addresses_2.out new file mode 100644 index 0000000..6891267 --- /dev/null +++ b/tests/0093_restore_ip_addresses_2.out @@ -0,0 +1 @@ +['0.0.0.0'] diff --git a/tests/0093_restore_ip_addresses_3.in b/tests/0093_restore_ip_addresses_3.in new file mode 
100644 index 0000000..1f22676 --- /dev/null +++ b/tests/0093_restore_ip_addresses_3.in @@ -0,0 +1 @@ +101023 diff --git a/tests/0093_restore_ip_addresses_3.out b/tests/0093_restore_ip_addresses_3.out new file mode 100644 index 0000000..1b79e03 --- /dev/null +++ b/tests/0093_restore_ip_addresses_3.out @@ -0,0 +1 @@ +['1.0.10.23', '1.0.102.3', '10.1.0.23', '10.10.2.3', '101.0.2.3'] diff --git a/tests/0131_palindrome_partitioning_1.in b/tests/0131_palindrome_partitioning_1.in new file mode 100644 index 0000000..6f27bcf --- /dev/null +++ b/tests/0131_palindrome_partitioning_1.in @@ -0,0 +1 @@ +aab diff --git a/tests/0131_palindrome_partitioning_1.out b/tests/0131_palindrome_partitioning_1.out new file mode 100644 index 0000000..fc7ba43 --- /dev/null +++ b/tests/0131_palindrome_partitioning_1.out @@ -0,0 +1 @@ +[['a', 'a', 'b'], ['aa', 'b']] diff --git a/tests/0131_palindrome_partitioning_2.in b/tests/0131_palindrome_partitioning_2.in new file mode 100644 index 0000000..7898192 --- /dev/null +++ b/tests/0131_palindrome_partitioning_2.in @@ -0,0 +1 @@ +a diff --git a/tests/0131_palindrome_partitioning_2.out b/tests/0131_palindrome_partitioning_2.out new file mode 100644 index 0000000..8698eed --- /dev/null +++ b/tests/0131_palindrome_partitioning_2.out @@ -0,0 +1 @@ +[['a']] diff --git a/tests/0216_combination_sum_iii_1.in b/tests/0216_combination_sum_iii_1.in new file mode 100644 index 0000000..dde5d5d --- /dev/null +++ b/tests/0216_combination_sum_iii_1.in @@ -0,0 +1,2 @@ +3 +7 diff --git a/tests/0216_combination_sum_iii_1.out b/tests/0216_combination_sum_iii_1.out new file mode 100644 index 0000000..66828f6 --- /dev/null +++ b/tests/0216_combination_sum_iii_1.out @@ -0,0 +1 @@ +[[1, 2, 4]] diff --git a/tests/0216_combination_sum_iii_2.in b/tests/0216_combination_sum_iii_2.in new file mode 100644 index 0000000..5d57ce0 --- /dev/null +++ b/tests/0216_combination_sum_iii_2.in @@ -0,0 +1,2 @@ +3 +9 diff --git a/tests/0216_combination_sum_iii_2.out 
b/tests/0216_combination_sum_iii_2.out new file mode 100644 index 0000000..bb04349 --- /dev/null +++ b/tests/0216_combination_sum_iii_2.out @@ -0,0 +1 @@ +[[1, 2, 6], [1, 3, 5], [2, 3, 4]] diff --git a/tests/0216_combination_sum_iii_3.in b/tests/0216_combination_sum_iii_3.in new file mode 100644 index 0000000..dcb4347 --- /dev/null +++ b/tests/0216_combination_sum_iii_3.in @@ -0,0 +1,2 @@ +4 +1 diff --git a/tests/0216_combination_sum_iii_3.out b/tests/0216_combination_sum_iii_3.out new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tests/0216_combination_sum_iii_3.out @@ -0,0 +1 @@ +[] diff --git a/tools/README.md b/tools/README.md index 0de82b8..8a21eae 100644 --- a/tools/README.md +++ b/tools/README.md @@ -10,6 +10,7 @@ Developer tools for checking, validating, and generating project content. |----------|------|---------| | **Checking** | [`check_solutions.py`](#check_solutionspy) | Validate solution file architecture compliance | | | [`run_format_tests.py`](#run_format_testspy) | Run format unit tests | +| | [`check_test_files.py`](#check_test_filespy) | Check and fix test files with double newline endings | | **Generation** | [`generate_mindmaps.py`](#generate_mindmapspy) | Rule-based mind map generation | | | [`generate_mindmaps_ai.py`](#generate_mindmaps_aipy) | AI-powered mind map generation | | | [`generate_pattern_docs.py`](#generate_pattern_docspy) | Pattern documentation generation | @@ -43,6 +44,7 @@ python tools/generate_pattern_docs.py tools/ ├── README.md # This file ├── check_solutions.py # Solution file checker +├── check_test_files.py # Test file format checker/fixer ├── run_format_tests.py # Format test runner ├── run_format_tests.bat/.sh # Format test scripts │ @@ -131,6 +133,33 @@ tools\run_format_tests.bat # Windows tools/run_format_tests.sh # Linux/Mac ``` +### `check_test_files.py` + +Check and fix double newline ending errors in test files under `tests/` directory. 
+ +Checks all `.in` and `.out` files to find files ending with two newlines (`\n\n`). + +```bash +python tools/check_test_files.py # List problematic files +python tools/check_test_files.py --fix # List and auto-fix +python tools/check_test_files.py --verbose # Show detailed info +``` + +**Features:** +- List all test files ending with two newlines +- Auto-fix: Remove extra newline, keep only one + +**Example Output:** +``` +Found 3 files ending with two newlines: + + tests/0977_squares_of_a_sorted_array_1.in + tests/0977_squares_of_a_sorted_array_1.out + tests/0142_linked_list_cycle_ii_1.in + +Tip: Use --fix to automatically fix these issues. +``` + --- ## 🧠 Mind Map Generation diff --git a/tools/check_test_files.py b/tools/check_test_files.py new file mode 100644 index 0000000..527c121 --- /dev/null +++ b/tools/check_test_files.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +""" +Check and fix double newline ending errors in test files under tests/ directory. + +Checks all .in and .out files to find files ending with two newlines (\n\n). + +Features: +1. List problematic files +2. Fix these files (remove extra newline) + +Exit Codes: +- 0: All files OK or fix successful +- 1: Errors found but not fixed + +Usage: + python tools/check_test_files.py # List problematic files + python tools/check_test_files.py --fix # List and fix + python tools/check_test_files.py --verbose # Show detailed info +""" +import sys +from pathlib import Path +from typing import List + + +PROJECT_ROOT = Path(__file__).parent.parent +TESTS_DIR = PROJECT_ROOT / "tests" + + +def check_file(file_path: Path) -> bool: + """ + Check if file ends with two newlines. 
+ + Returns: + True if file ends with \n\n (problematic) + False if file is OK + """ + try: + with open(file_path, 'rb') as f: + data = f.read() + # Check if ends with \n\n + return data.endswith(b'\n\n') + except Exception as e: + print(f"Error: Cannot read file {file_path}: {e}", file=sys.stderr) + return False + + +def fix_file(file_path: Path) -> bool: + """ + Fix file: remove extra newline at end, keep only one. + + Returns: + True if fix successful + False if fix failed + """ + try: + with open(file_path, 'rb') as f: + data = f.read() + + # If ends with \n\n, remove one \n + if data.endswith(b'\n\n'): + # Remove last \n + fixed_data = data[:-1] + + with open(file_path, 'wb') as f: + f.write(fixed_data) + return True + return False + except Exception as e: + print(f"Error: Cannot fix file {file_path}: {e}", file=sys.stderr) + return False + + +def find_test_files() -> List[Path]: + """Find all .in and .out test files.""" + if not TESTS_DIR.exists(): + print(f"Error: tests directory does not exist: {TESTS_DIR}", file=sys.stderr) + return [] + + test_files = [] + for ext in ['.in', '.out']: + test_files.extend(TESTS_DIR.glob(f'*{ext}')) + + return sorted(test_files) + + +def main(): + """Main function.""" + # Parse arguments + fix_mode = '--fix' in sys.argv or '-f' in sys.argv + verbose = '--verbose' in sys.argv or '-v' in sys.argv + + # Find all test files + test_files = find_test_files() + + if not test_files: + print("No test files found.") + return 0 + + if verbose: + print(f"Checking {len(test_files)} test files...") + + # Check files + problematic_files = [] + for file_path in test_files: + if check_file(file_path): + problematic_files.append(file_path) + + # Show results + if not problematic_files: + print("✓ All test files are OK (no double newline endings).") + return 0 + + print(f"\nFound {len(problematic_files)} files ending with two newlines:\n") + for file_path in problematic_files: + rel_path = file_path.relative_to(PROJECT_ROOT) + print(f" 
{rel_path}") + + # Fix mode + if fix_mode: + print(f"\nFixing {len(problematic_files)} files...") + fixed_count = 0 + failed_count = 0 + + for file_path in problematic_files: + if fix_file(file_path): + fixed_count += 1 + if verbose: + rel_path = file_path.relative_to(PROJECT_ROOT) + print(f" ✓ Fixed: {rel_path}") + else: + failed_count += 1 + rel_path = file_path.relative_to(PROJECT_ROOT) + print(f" ✗ Fix failed: {rel_path}", file=sys.stderr) + + print(f"\nFix complete: {fixed_count} succeeded, {failed_count} failed") + return 0 if failed_count == 0 else 1 + else: + print("\nTip: Use --fix to automatically fix these issues.") + return 1 + + +if __name__ == '__main__': + sys.exit(main()) +