-
Notifications
You must be signed in to change notification settings - Fork 0
fix: reduce scraper and scheduler error log noise #267
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
59559fc
8d03301
fb0564f
67e7966
a3ad14f
2a8d203
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -664,28 +664,56 @@ describe("daily metrics cleanup", () => { | |
| consoleSpy.mockRestore(); | ||
| }); | ||
|
|
||
| it("logs error when cleanup query fails", async () => { | ||
| it("logs error when cleanup fails with non-transient DB error", async () => { | ||
| await startScheduler(); | ||
| mockDbExecute.mockRejectedValueOnce(new Error("DB timeout")); | ||
| mockDbExecute.mockRejectedValueOnce(new Error('relation "monitor_metrics" does not exist')); | ||
| await runCron("0 3 * * *"); | ||
|
|
||
| expect(ErrorLogger.error).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| "monitor_metrics cleanup failed", | ||
| expect.any(Error), | ||
| expect.objectContaining({ | ||
| errorMessage: "DB timeout", | ||
| errorMessage: 'relation "monitor_metrics" does not exist', | ||
| retentionDays: 90, | ||
| table: "monitor_metrics", | ||
| }) | ||
| ); | ||
| }); | ||
|
|
||
| it("logs warning when cleanup fails with transient DB error", async () => { | ||
| await startScheduler(); | ||
| mockDbExecute | ||
| .mockRejectedValueOnce(new Error("Connection terminated")) | ||
| .mockRejectedValueOnce(new Error("Connection terminated")); | ||
| const cronPromise = runCron("0 3 * * *"); | ||
| await vi.advanceTimersByTimeAsync(2000); | ||
| await cronPromise; | ||
|
|
||
| expect(ErrorLogger.warning).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| "monitor_metrics cleanup failed (transient, will retry)", | ||
| expect.objectContaining({ | ||
| errorMessage: "Connection terminated", | ||
| retentionDays: 90, | ||
| table: "monitor_metrics", | ||
| }) | ||
| ); | ||
| // Verify the monitor_metrics cleanup itself didn't log an error (other cleanup tasks may) | ||
| expect(ErrorLogger.error).not.toHaveBeenCalledWith( | ||
| "scheduler", | ||
| "monitor_metrics cleanup failed", | ||
| expect.anything(), | ||
| expect.anything() | ||
| ); | ||
| }); | ||
|
Comment on lines +684 to +709
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.

**Tighten transient cleanup assertions (retry + no error log).** This case currently validates only the warning payload. It would still pass if the retry were removed or if `ErrorLogger.error` were also invoked. Suggested assertion tightening:

  expect(ErrorLogger.warning).toHaveBeenCalledWith(
    "scheduler",
    "monitor_metrics cleanup failed (transient, will retry)",
    expect.objectContaining({
      errorMessage: "Connection terminated",
      retentionDays: 90,
      table: "monitor_metrics",
    })
  );
+ expect(mockDbExecute).toHaveBeenCalledTimes(2);
+ expect(ErrorLogger.error).not.toHaveBeenCalled();

As per coding guidelines. 🤖 Prompt for AI Agents |
||
|
|
||
| it("handles non-Error thrown in cleanup (uses String coercion)", async () => { | ||
| await startScheduler(); | ||
| mockDbExecute.mockRejectedValueOnce("disk full"); | ||
| await runCron("0 3 * * *"); | ||
|
|
||
| // Non-Error values are not transient, so logged as error | ||
| expect(ErrorLogger.error).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| "monitor_metrics cleanup failed", | ||
|
|
@@ -820,6 +848,42 @@ describe("notification queue and digest cron (*/1 * * * *)", () => { | |
| }) | ||
| ); | ||
| }); | ||
|
|
||
| it("logs warning (not error) when processQueuedNotifications fails with transient DB error", async () => { | ||
| // Not wrapped in withDbRetry (to prevent duplicate deliveries), but | ||
| // logSchedulerError still classifies transient errors as warnings. | ||
| mockProcessQueuedNotifications | ||
| .mockRejectedValueOnce(new Error("Connection terminated")); | ||
|
|
||
| await startScheduler(); | ||
| await runCron("*/1 * * * *"); | ||
|
|
||
| expect(ErrorLogger.warning).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| expect.stringContaining("Queued notification processing failed (transient, will retry)"), | ||
| expect.objectContaining({ | ||
| errorMessage: "Connection terminated", | ||
| }) | ||
| ); | ||
| expect(ErrorLogger.error).not.toHaveBeenCalled(); | ||
| }); | ||
|
|
||
| it("logs warning (not error) when processDigestCron fails with transient DB error", async () => { | ||
| mockProcessDigestCron | ||
| .mockRejectedValueOnce(new Error("Connection terminated")); | ||
|
|
||
| await startScheduler(); | ||
| await runCron("*/1 * * * *"); | ||
|
|
||
| expect(ErrorLogger.warning).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| expect.stringContaining("Digest processing failed (transient, will retry)"), | ||
| expect.objectContaining({ | ||
| errorMessage: "Connection terminated", | ||
| }) | ||
| ); | ||
| expect(ErrorLogger.error).not.toHaveBeenCalled(); | ||
| }); | ||
| }); | ||
|
|
||
| describe("stopScheduler", () => { | ||
|
|
@@ -931,7 +995,7 @@ describe("withDbRetry and re-entrancy guards", () => { | |
| ); | ||
| }); | ||
|
|
||
| it("logs error when retry also fails on transient error", async () => { | ||
| it("logs warning when retry also fails on transient error", async () => { | ||
| mockGetAllActiveMonitors | ||
| .mockRejectedValueOnce(new Error("Connection terminated")) | ||
| .mockRejectedValueOnce(new Error("Connection terminated again")); | ||
|
|
@@ -942,11 +1006,11 @@ describe("withDbRetry and re-entrancy guards", () => { | |
| await cronPromise; | ||
|
|
||
| expect(mockGetAllActiveMonitors).toHaveBeenCalledTimes(2); | ||
| expect(ErrorLogger.error).toHaveBeenCalledWith( | ||
| // Transient DB errors are downgraded to warnings via logSchedulerError helper | ||
| expect(ErrorLogger.warning).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| "Scheduler iteration failed", | ||
| expect.any(Error), | ||
| expect.objectContaining({ phase: "fetching active monitors" }) | ||
| expect.stringContaining("Scheduler iteration failed (transient, will retry)"), | ||
| expect.objectContaining({ activeChecks: 0 }) | ||
| ); | ||
coderabbitai[bot] marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| }); | ||
|
|
||
|
|
@@ -1045,6 +1109,34 @@ describe("withDbRetry and re-entrancy guards", () => { | |
| resolveRetries([]); | ||
| await firstRun; | ||
| }); | ||
|
|
||
| it("logs warning (not error) when webhook processing fails with transient DB error", async () => { | ||
| // Both withDbRetry attempts fail with transient error | ||
| mockStorage.getPendingWebhookRetries | ||
| .mockRejectedValueOnce(new Error("Connection terminated")) | ||
| .mockRejectedValueOnce(new Error("Connection terminated")); | ||
|
|
||
| await startScheduler(); | ||
| const callbacks = cronCallbacks["*/1 * * * *"]; | ||
| await callbacks[0](); // notification cron | ||
| const webhookPromise = callbacks[1](); | ||
| await vi.advanceTimersByTimeAsync(2000); | ||
| await webhookPromise; | ||
|
|
||
| expect(ErrorLogger.warning).toHaveBeenCalledWith( | ||
| "scheduler", | ||
| expect.stringContaining("Webhook retry processing failed (transient, will retry)"), | ||
| expect.objectContaining({ | ||
| errorMessage: "Connection terminated", | ||
| }) | ||
| ); | ||
| expect(ErrorLogger.error).not.toHaveBeenCalledWith( | ||
| "scheduler", | ||
| expect.stringContaining("Webhook"), | ||
| expect.anything(), | ||
| expect.anything() | ||
| ); | ||
| }); | ||
| }); | ||
|
|
||
| describe("webhook retry cumulative backoff", () => { | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.