diff --git a/fix_issue_76.py b/fix_issue_76.py new file mode 100644 index 00000000..6af44d5e --- /dev/null +++ b/fix_issue_76.py @@ -0,0 +1,3 @@ +```json +{ + "solution_code": "### FILE: app/routes/privacy.py\n\n```python\nimport csv\nimport io\nimport json\nimport logging\nimport os\nimport zipfile\nfrom datetime import datetime, timezone\n\nfrom flask import Blueprint, Response, g, jsonify, request, stream_with_context\nfrom flask_jwt_extended import get_jwt_identity, jwt_required\n\nfrom app.db import get_db\nfrom app.tasks import export_scheduler\nfrom app.utils.audit import log_audit_event\nfrom app.utils.redis_client import delete_user_cache\n\nlogger = logging.getLogger(__name__)\n\nprivacy_bp = Blueprint('privacy', __name__, url_prefix='/users')\n\n\n# ---------------------------------------------------------------------------\n# Helper: fetch all user data from DB\n# ---------------------------------------------------------------------------\n\ndef _fetch_user_data(conn, user_id: int) -> dict:\n cur = conn.cursor()\n\n def query(sql, params):\n cur.execute(sql, params)\n cols = [d[0] for d in cur.description]\n return [dict(zip(cols, row)) for row in cur.fetchall()]\n\n profile = query(\n \"\"\"SELECT id, email, full_name, created_at, updated_at\n FROM users WHERE id = %s\"\"\",\n (user_id,)\n )\n\n expenses = query(\n \"\"\"SELECT e.id, c.name AS category, e.amount, e.description,\n e.date, e.created_at\n FROM expenses e\n LEFT JOIN categories c ON c.id = e.category_id\n WHERE e.user_id = %s\n ORDER BY e.date DESC\"\"\",\n (user_id,)\n )\n\n bills = query(\n \"\"\"SELECT id, name, amount, due_date, is_paid, paid_at, created_at\n FROM bills WHERE user_id = %s ORDER BY due_date\"\"\",\n (user_id,)\n )\n\n reminders = query(\n \"\"\"SELECT id, bill_id, remind_at, channel, sent, created_at\n FROM reminders WHERE user_id = %s ORDER BY remind_at\"\"\",\n (user_id,)\n )\n\n categories = query(\n \"\"\"SELECT id, name, budget_limit, created_at\n FROM categories 
WHERE user_id = %s ORDER BY name\"\"\",\n (user_id,)\n )\n\n subscriptions = query(\n \"\"\"SELECT us.id, sp.name AS plan, us.started_at, us.expires_at, us.status\n FROM user_subscriptions us\n JOIN subscription_plans sp ON sp.id = us.plan_id\n WHERE us.user_id = %s\"\"\",\n (user_id,)\n )\n\n cur.close()\n return {\n 'profile': profile,\n 'categories': categories,\n 'expenses': expenses,\n 'bills': bills,\n 'reminders': reminders,\n 'subscriptions': subscriptions,\n 'exported_at': datetime.now(timezone.utc).isoformat(),\n }\n\n\ndef _serialize_dates(obj):\n \"\"\"JSON serialiser that handles date/datetime objects.\"\"\"\n if isinstance(obj, (datetime,)):\n return obj.isoformat()\n from datetime import date\n if isinstance(obj, date):\n return obj.isoformat()\n raise TypeError(f'Type {type(obj)} not serialisable')\n\n\ndef _build_zip(data: dict) -> bytes:\n \"\"\"Pack user data into an in-memory ZIP with JSON + CSV files.\"\"\"\n buf = io.BytesIO()\n with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as zf:\n # master JSON\n zf.writestr(\n 'data.json',\n json.dumps(data, indent=2, default=_serialize_dates)\n )\n # per-section CSVs\n for section, rows in data.items():\n if not isinstance(rows, list) or not rows:\n continue\n csv_buf = io.StringIO()\n writer = csv.DictWriter(csv_buf, fieldnames=rows[0].keys())\n writer.writeheader()\n for row in rows:\n writer.writerow(\n {k: v.isoformat() if isinstance(v, (datetime,)) else v\n for k, v in row.items()}\n )\n zf.writestr(f'{section}.csv', csv_buf.getvalue())\n\n # human-readable README\n zf.writestr('README.txt', (\n 'FinMind — Personal Data Export\\n'\n '================================\\n'\n f'Exported at: {data[\"exported_at\"]}\\n\\n'\n 'Files:\\n'\n ' data.json — complete data set (JSON)\\n'\n ' profile.csv — account information\\n'\n ' expenses.csv — expense records\\n'\n ' bills.csv — bill records\\n'\n ' reminders.csv — reminder records\\n'\n ' categories.csv — categories\\n'\n ' subscriptions.csv — 
subscription history\\n\\n'\n            'To request deletion contact: privacy@finmind.app\\n'\n        ))\n    buf.seek(0)\n    return buf.read()\n\n\n# ---------------------------------------------------------------------------\n# GET /users/export\n# ---------------------------------------------------------------------------\n\n@privacy_bp.route('/export', methods=['GET'])\n@jwt_required()\ndef export_data():\n    \"\"\"\n    Generate and return a ZIP archive containing all personal data\n    associated with the authenticated user.\n\n    Headers added for GDPR compliance.\n    \"\"\"\n    user_id = int(get_jwt_identity())\n    conn = get_db()\n\n    try:\n        data = _fetch_user_data(conn, user_id)\n        zip_bytes = _build_zip(data)\n    except Exception as exc:\n        logger.exception('Export failed for user %s', user_id)\n        return jsonify({'error': 'Export generation failed', 'detail': str(exc)}), 500\n\n    log_audit_event(\n        conn,\n        user_id=user_id,\n        action='DATA_EXPORT',\n        detail={\n            'records': {\n                k: len(v) for k, v in data.items() if isinstance(v, list)\n            },\n            'ip': request.remote_addr,\n        }\n    )\n    conn.commit()\n\n    filename = f'finmind-export-{user_id}-{datetime.now(timezone.utc).strftime(\"%Y%m%d%H%M%S\")}.zip'\n    response = Response(\n        zip_bytes,\n        status=200,\n        mimetype='application/zip',\n        headers={\n            'Content-Disposition': f'attachment; filename=\"{filename}\"',\n            'Content-Length': str(len(zip_bytes)),\n            # GDPR / privacy headers\n            'Cache-Control': 'no-store, no-cache, must-revalidate, private',\n            'Pragma': 'no-cache',\n            'X-Content-Type-Options': 'nosniff',\n            'X-Robots-Tag': 'none',\n        }\n    )\n    return response\n\n\n# ---------------------------------------------------------------------------\n# DELETE /users/delete\n# ---------------------------------------------------------------------------\n\n@privacy_bp.route('/delete', methods=['DELETE'])\n@jwt_required()\ndef delete_account():\n    \"\"\"\n    Permanently and irreversibly delete the authenticated user's account\n    and all associated data.\n\n    Body (JSON):\n        confirm (str, 
required) — must equal the string \"DELETE\"\n reason (str, optional) — reason provided by the user\n\n Workflow:\n 1. Validate confirmation token\n 2. Write audit log entry BEFORE deletion\n 3. Hard-delete all user data (cascade via FK constraints)\n 4. Purge Redis cache keys\n 5. Respond 200 — caller must discard JWT client-side\n \"\"\"\n user_id = int(get_jwt_identity())\n body = request.get_json(silent=True) or {}\n\n if body.get('confirm') != 'DELETE':\n return jsonify({\n 'error': 'Confirmation required',\n 'detail': 'Send JSON body {\"confirm\": \"DELETE\"} to proceed.'\n }), 400\n\n reason = body.get('reason', 'Not provided')\n conn = get_db()\n cur = conn.cursor()\n\n try:\n # ----------------------------------------------------------------\n # 1. Capture a snapshot for the audit record BEFORE deleting\n # ----------------------------------------------------------------\n cur.execute(\n 'SELECT email, full_name, created_at FROM users WHERE id = %s',\n (user_id,)\n )\n row = cur.fetchone()\n if not row:\n cur.close()\n return jsonify({'error': 'User not found'}), 404\n\n email, full_name, created_at = row\n\n # ----------------------------------------------------------------\n # 2. Write the audit log entry (survives user deletion because\n # audit_logs.user_id has ON DELETE SET NULL)\n # ----------------------------------------------------------------\n log_audit_event(\n conn,\n user_id=user_id,\n action='ACCOUNT_DELETE_INITIATED',\n detail={\n 'email': email,\n 'full_name': full_name,\n 'account_created_at': created_at.isoformat() if hasattr(created_at, 'isoformat') else str(created_at),\n 'deletion_reason': reason,\n 'ip': request.remote_addr,\n 'user_agent': request.user_agent.string,\n }\n )\n\n # ----------------------------------------------------------------\n # 3. 
Hard delete — rely on FK ON DELETE CASCADE for child tables\n # (expenses, bills, reminders, categories, tokens, etc.)\n # ----------------------------------------------------------------\n _hard_delete_user(cur, user_id)\n\n # ----------------------------------------------------------------\n # 4. Final audit record (user_id will be NULL after delete)\n # ----------------------------------------------------------------\n log_audit_event(\n conn,\n user_id=None, # user is gone\n action='ACCOUNT_DELETE_COMPLETED',\n detail={\n 'deleted_user_id': user_id,\n 'email_hash': _hash_email(email),\n 'ip': request.remote_addr,\n }\n )\n\n conn.commit()\n cur.close()\n\n except Exception as exc:\n conn.rollback()\n cur.close()\n logger.exception('Deletion failed for user %s', user_id)\n return jsonify({'error': 'Deletion failed', 'detail': str(exc)}), 500\n\n # ----------------------------------------------------------------\n # 5. Purge Redis cache (best-effort, non-fatal)\n # ----------------------------------------------------------------\n try:\n delete_user_cache(user_id)\n except Exception:\n logger.warning('Redis cache purge failed for deleted \ No newline at end of file