Just a whole lot of crap
This commit is contained in:
428
tests/services/logReader.test.js
Normal file
428
tests/services/logReader.test.js
Normal file
@@ -0,0 +1,428 @@
|
||||
// Promise-based fs API (replaced by the jest mock below) and the service under test.
const fs = require("fs").promises;
const LogReaderService = require("../../src/services/logReader");

// Stub out the entire fs module so no test ever touches the real disk.
// Locals declared inside the factory are allowed by jest's hoisting rules.
jest.mock("fs", () => {
  const mockPromisesApi = {
    stat: jest.fn(),
    readFile: jest.fn(),
    access: jest.fn(),
  };
  return {
    promises: mockPromisesApi,
    watchFile: jest.fn(),
    unwatchFile: jest.fn(),
  };
});
|
||||
|
||||
describe("LogReaderService", () => {
  // Fresh service instance, rebuilt before every test.
  let logReader;
  // Markdown fixture imitating a real progress log: one update operation,
  // one rollback operation, and a trailing error-analysis section.
  let mockLogContent;

  beforeEach(() => {
    // NOTE(review): clearAllMocks clears calls/instances but does NOT reset
    // mock implementations or return values set by earlier tests.
    jest.clearAllMocks();
    logReader = new LogReaderService("test-progress.md");

    // Mock log content
    mockLogContent = `# Shopify Price Update Progress Log

This file tracks the progress of price update operations.


## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Price Adjustment: -10%
- Started: 2025-08-06 20:30:39 UTC

**Progress:**
- ✅ **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $600 → $540
- Compare At Price: $600
- Updated: 2025-08-06 20:30:40 UTC
- ❌ **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Error: Rate limit exceeded
- Failed: 2025-08-06 20:30:41 UTC

**Summary:**
- Total Products Processed: 2
- Successful Updates: 1
- Failed Updates: 1
- Duration: 2 seconds
- Completed: 2025-08-06 20:30:42 UTC

---


## Price Rollback Operation - 2025-08-06 20:31:06 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Operation Mode: rollback
- Started: 2025-08-06 20:31:06 UTC

**Progress:**
- 🔄 **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $540 → $600 (from Compare At: $600)
- Rolled back: 2025-08-06 20:31:07 UTC

**Rollback Summary:**
- Total Products Processed: 1
- Total Variants Processed: 1
- Eligible Variants: 1
- Successful Rollbacks: 1
- Failed Rollbacks: 0
- Skipped Variants: 0 (no compare-at price)
- Duration: 1 seconds
- Completed: 2025-08-06 20:31:07 UTC

---


**Error Analysis - 2025-08-06 20:31:10 UTC**

**Error Summary by Category:**
- Rate Limiting: 1 error

**Detailed Error Log:**
1. **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Category: Rate Limiting
- Error: Rate limit exceeded (429)
`;
  });
|
||||
|
||||
describe("File Reading", () => {
  test("reads and parses log entries successfully", async () => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);

    const entries = await logReader.readLogEntries();

    // The reader stats then reads the configured file in utf8.
    expect(fs.stat).toHaveBeenCalledWith("test-progress.md");
    expect(fs.readFile).toHaveBeenCalledWith("test-progress.md", "utf8");
    // Two main operations, newest first.
    expect(entries).toHaveLength(2);
    expect(entries[0].type).toBe("rollback");
    expect(entries[1].type).toBe("update");
  });

  test("returns empty array when file doesn't exist", async () => {
    const missingFileError = Object.assign(new Error("File not found"), {
      code: "ENOENT",
    });
    fs.stat.mockRejectedValue(missingFileError);

    // A missing log file is not an error condition — just no entries.
    await expect(logReader.readLogEntries()).resolves.toEqual([]);
  });

  test("throws error for other file system errors", async () => {
    const permissionError = Object.assign(new Error("Permission denied"), {
      code: "EACCES",
    });
    fs.stat.mockRejectedValue(permissionError);

    // Anything other than ENOENT must propagate to the caller.
    await expect(logReader.readLogEntries()).rejects.toThrow(
      "Permission denied"
    );
  });

  test("uses cache when file hasn't changed", async () => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);

    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(1);

    // Same mtime on the second read — the cached parse should be reused.
    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(1);
  });

  test("refreshes cache when file has changed", async () => {
    fs.stat
      .mockResolvedValueOnce({ mtime: new Date("2025-08-06T20:32:00Z") })
      .mockResolvedValueOnce({ mtime: new Date("2025-08-06T20:33:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);

    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(1);

    // Newer mtime on the second read — the cache must be invalidated.
    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(2);
  });
});
|
||||
|
||||
describe("Log Parsing", () => {
  beforeEach(() => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);
  });

  test("parses operation headers correctly", async () => {
    const parsed = await logReader.readLogEntries();
    const update = parsed.find((entry) => entry.type === "update");
    const rollback = parsed.find((entry) => entry.type === "rollback");

    // The "## ..." markdown header becomes the entry title and message.
    expect(update.title).toBe(
      "Price Update Operation - 2025-08-06 20:30:39 UTC"
    );
    expect(update.level).toBe("INFO");
    expect(update.message).toBe(
      "Started: Price Update Operation - 2025-08-06 20:30:39 UTC"
    );
    expect(rollback.title).toBe(
      "Price Rollback Operation - 2025-08-06 20:31:06 UTC"
    );
  });

  test("parses configuration sections correctly", async () => {
    const parsed = await logReader.readLogEntries();
    const update = parsed.find((entry) => entry.type === "update");

    // "**Configuration:**" bullets are lifted into a key/value map and
    // also echoed into the free-text details.
    expect(update.configuration["Target Tag"]).toBe("Collection-Snowboard");
    expect(update.configuration["Price Adjustment"]).toBe("-10%");
    expect(update.details).toContain("Target Tag: Collection-Snowboard");
  });

  test("parses timestamps correctly", async () => {
    const parsed = await logReader.readLogEntries();
    const update = parsed.find((entry) => entry.type === "update");

    // The "... UTC" suffix in headers is parsed into a real Date while the
    // original string is preserved verbatim.
    expect(update.timestamp).toEqual(new Date("2025-08-06T20:30:39Z"));
    expect(update.rawTimestamp).toBe("2025-08-06 20:30:39 UTC");
  });

  test("identifies operation types correctly", async () => {
    const parsed = await logReader.readLogEntries();

    expect(parsed.some((entry) => entry.type === "update")).toBe(true);
    expect(parsed.some((entry) => entry.type === "rollback")).toBe(true);
  });

  test("sorts entries by timestamp (newest first)", async () => {
    const parsed = await logReader.readLogEntries();

    // Rollback (20:31:06) is newer than update (20:30:39), so it sorts first.
    expect(parsed[0].type).toBe("rollback");
    expect(parsed[1].type).toBe("update");
  });
});
|
||||
|
||||
describe("Pagination", () => {
  beforeEach(() => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);
  });

  test("returns paginated results correctly", async () => {
    const { entries, pagination } = await logReader.getPaginatedEntries({
      page: 0,
      pageSize: 1,
      levelFilter: "ALL",
      searchTerm: "",
    });

    // First page of a 2-entry log at page size 1.
    expect(entries).toHaveLength(1);
    expect(pagination.currentPage).toBe(0);
    expect(pagination.pageSize).toBe(1);
    expect(pagination.totalEntries).toBe(2);
    expect(pagination.totalPages).toBe(2);
    expect(pagination.hasNextPage).toBe(true);
    expect(pagination.hasPreviousPage).toBe(false);
  });

  test("handles second page correctly", async () => {
    const { entries, pagination } = await logReader.getPaginatedEntries({
      page: 1,
      pageSize: 1,
      levelFilter: "ALL",
      searchTerm: "",
    });

    // Last page: something behind us, nothing ahead.
    expect(entries).toHaveLength(1);
    expect(pagination.currentPage).toBe(1);
    expect(pagination.hasNextPage).toBe(false);
    expect(pagination.hasPreviousPage).toBe(true);
  });

  test("uses default pagination options", async () => {
    const { pagination, filters } = await logReader.getPaginatedEntries();

    // Calling with no options must fall back to the documented defaults.
    expect(pagination.pageSize).toBe(20);
    expect(pagination.currentPage).toBe(0);
    expect(filters.levelFilter).toBe("ALL");
    expect(filters.searchTerm).toBe("");
  });
});
|
||||
|
||||
describe("Filtering", () => {
  beforeEach(() => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);
  });

  test("filters by log level correctly", async () => {
    const result = await logReader.getPaginatedEntries({
      levelFilter: "INFO",
    });

    // Every surviving entry carries the requested level, and the applied
    // filter is echoed back in the response.
    const allInfo = result.entries.every((entry) => entry.level === "INFO");
    expect(allInfo).toBe(true);
    expect(result.filters.levelFilter).toBe("INFO");
  });

  test("filters by search term in message", async () => {
    const result = await logReader.getPaginatedEntries({
      searchTerm: "rollback",
    });

    expect(result.entries.length).toBeGreaterThan(0);
    // The match may live in either the message or the title.
    const matchesTerm = (entry) =>
      entry.message.toLowerCase().includes("rollback") ||
      entry.title.toLowerCase().includes("rollback");
    expect(result.entries.some(matchesTerm)).toBe(true);
  });

  test("filters by search term in details", async () => {
    const result = await logReader.getPaginatedEntries({
      searchTerm: "Collection-Snowboard",
    });

    expect(result.entries.length).toBeGreaterThan(0);
    const detailHit = result.entries.some((entry) =>
      entry.details.includes("Collection-Snowboard")
    );
    expect(detailHit).toBe(true);
  });

  test("returns empty results for non-matching filters", async () => {
    const result = await logReader.getPaginatedEntries({
      searchTerm: "nonexistent-term-xyz",
    });

    expect(result.entries).toHaveLength(0);
    expect(result.pagination.totalEntries).toBe(0);
  });
});
|
||||
|
||||
describe("Statistics", () => {
  beforeEach(() => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);
  });

  test("calculates log statistics correctly", async () => {
    const summary = await logReader.getLogStatistics();

    // Fixture contains exactly one update and one rollback, both INFO.
    expect(summary.totalEntries).toBe(2);
    expect(summary.byLevel.INFO).toBe(2);
    expect(summary.byType.update).toBe(1);
    expect(summary.byType.rollback).toBe(1);
    expect(summary.operations.total).toBe(2);
  });

  test("tracks date range correctly", async () => {
    const summary = await logReader.getLogStatistics();

    // Oldest = the update's start time, newest = the rollback's start time.
    expect(summary.dateRange.oldest).toEqual(new Date("2025-08-06T20:30:39Z"));
    expect(summary.dateRange.newest).toEqual(new Date("2025-08-06T20:31:06Z"));
  });
});
|
||||
|
||||
describe("Cache Management", () => {
  test("clears cache when requested", async () => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(mockLogContent);

    // First read populates the cache.
    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(1);

    // After an explicit clear, the same mtime must still force a re-read.
    logReader.clearCache();
    await logReader.readLogEntries();
    expect(fs.readFile).toHaveBeenCalledTimes(2);
  });
});
|
||||
|
||||
describe("File Watching", () => {
  test("sets up file watching correctly", () => {
    const mockCallback = jest.fn();
    const mockCleanup = jest.fn();

    // Use ...Once so this stub cannot leak into later tests: the suite's
    // beforeEach calls jest.clearAllMocks(), which clears call data but
    // does NOT reset implementations/return values (that's resetAllMocks).
    require("fs").watchFile.mockReturnValueOnce(mockCleanup);

    const cleanup = logReader.watchFile(mockCallback);

    // The service watches its configured path with some change listener.
    expect(require("fs").watchFile).toHaveBeenCalledWith(
      "test-progress.md",
      expect.any(Function)
    );
    // Callers always get a cleanup function back.
    expect(typeof cleanup).toBe("function");
  });

  test("returns no-op cleanup function when watching fails", () => {
    // mockImplementationOnce instead of mockImplementation: a persistent
    // throwing implementation would survive jest.clearAllMocks() and could
    // break any subsequent test that touches fs.watchFile.
    require("fs").watchFile.mockImplementationOnce(() => {
      throw new Error("Watch failed");
    });

    const cleanup = logReader.watchFile(() => {});

    expect(typeof cleanup).toBe("function");
    // Should not throw when called
    expect(() => cleanup()).not.toThrow();
  });
});
|
||||
|
||||
describe("Error Handling", () => {
  test("handles malformed log content gracefully", async () => {
    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(
      "This is not a valid log format\nRandom text\n## Invalid header"
    );

    const entries = await logReader.readLogEntries();

    // Garbage input must not throw — an array (possibly empty) comes back.
    expect(Array.isArray(entries)).toBe(true);
  });

  test("handles invalid timestamps gracefully", async () => {
    // Header carries an unparseable timestamp but the section is otherwise
    // well-formed.
    const invalidTimestampContent = `## Price Update Operation - invalid-timestamp

**Configuration:**
- Target Tag: test

**Progress:**

**Summary:**
- Total Products Processed: 0
`;

    fs.stat.mockResolvedValue({ mtime: new Date("2025-08-06T20:32:00Z") });
    fs.readFile.mockResolvedValue(invalidTimestampContent);

    const entries = await logReader.readLogEntries();

    // The entry still parses, with some fallback Date for the timestamp.
    expect(entries).toHaveLength(1);
    expect(entries[0].timestamp).toBeInstanceOf(Date);
  });
});
|
||||
});
|
||||
Reference in New Issue
Block a user