Add unit test suites for LogService and TagAnalysisService
This commit is contained in:
656
tests/services/LogService.test.js
Normal file
656
tests/services/LogService.test.js
Normal file
@@ -0,0 +1,656 @@
|
||||
const fs = require("fs").promises;
|
||||
const path = require("path");
|
||||
const LogService = require("../../src/services/LogService");
|
||||
|
||||
// Mock fs module
|
||||
jest.mock("fs", () => ({
|
||||
promises: {
|
||||
readdir: jest.fn(),
|
||||
stat: jest.fn(),
|
||||
readFile: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe("LogService", () => {
  // Unit tests for LogService: log-file discovery (getLogFiles), reading
  // (readLogFile), parsing (parseLogContent), filtering (filterLogs) and
  // pagination (paginateLogs). All filesystem access goes through the
  // mocked fs.promises API declared at the top of this file.
  let logService;
  let mockLogContent;

  beforeEach(() => {
    jest.clearAllMocks();
    logService = new LogService();

    // Mock comprehensive log content: three operations (update, rollback,
    // scheduled) plus a trailing error-analysis section. The literal is
    // flush-left because its text is part of the string under test.
    mockLogContent = `# Shopify Price Update Progress Log

This file tracks the progress of price update operations.

---

## Recent Operations

## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Price Adjustment: -10%
- Started: 2025-08-06 20:30:39 UTC

**Progress:**
- ✅ **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $600 → $540
- Compare At Price: $600
- Updated: 2025-08-06 20:30:40 UTC
- ❌ **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Error: Rate limit exceeded
- Failed: 2025-08-06 20:30:41 UTC

**Summary:**
- Total Products Processed: 2
- Successful Updates: 1
- Failed Updates: 1
- Duration: 2 seconds
- Completed: 2025-08-06 20:30:42 UTC

---

## Price Rollback Operation - 2025-08-06 20:31:06 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Operation Mode: rollback
- Started: 2025-08-06 20:31:06 UTC

**Progress:**
- 🔄 **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $540 → $600 (from Compare At: $600)
- Rolled back: 2025-08-06 20:31:07 UTC

**Rollback Summary:**
- Total Products Processed: 1
- Total Variants Processed: 1
- Eligible Variants: 1
- Successful Rollbacks: 1
- Failed Rollbacks: 0
- Skipped Variants: 0 (no compare-at price)
- Duration: 1 seconds
- Completed: 2025-08-06 20:31:07 UTC

---

## Scheduled Update Operation - 2025-08-06 21:00:00 UTC

**Configuration:**
- Target Tag: Sale-Items
- Price Adjustment: -20%
- Scheduled: true
- Started: 2025-08-06 21:00:00 UTC

**Progress:**

**Summary:**
- Total Products Processed: 0
- Successful Updates: 0
- Failed Updates: 0
- Duration: 0 seconds
- Completed: 2025-08-06 21:00:00 UTC

---

**Error Analysis - 2025-08-06 20:31:10 UTC**

**Error Summary by Category:**
- Rate Limiting: 1 error

**Detailed Error Log:**
1. **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Category: Rate Limiting
- Error: Rate limit exceeded (429)
`;
  });

  describe("getLogFiles()", () => {
    // Discovery scans the current directory, keeps matching markdown log
    // files, stats them, and counts operations in each.
    test("discovers available log files successfully", async () => {
      const mockFiles = [
        "Progress.md",
        "backup-log.md",
        "other.txt",
        "test-Progress.md",
      ];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      expect(fs.readdir).toHaveBeenCalledWith(".");
      expect(logFiles).toHaveLength(3); // Only .md files with Progress or log
      expect(logFiles[0]).toMatchObject({
        filename: expect.any(String),
        path: expect.any(String),
        size: 1024,
        createdAt: expect.any(Date),
        modifiedAt: expect.any(Date),
        operationCount: expect.any(Number),
        isMainLog: expect.any(Boolean),
      });
    });

    test("identifies main log file correctly", async () => {
      const mockFiles = ["Progress.md", "backup-log.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      // Only Progress.md should be flagged as the main log.
      const mainLog = logFiles.find((f) => f.isMainLog);
      const backupLog = logFiles.find((f) => !f.isMainLog);

      expect(mainLog.filename).toBe("Progress.md");
      expect(backupLog.filename).toBe("backup-log.md");
    });

    test("counts operations in log files correctly", async () => {
      const mockFiles = ["Progress.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      expect(logFiles[0].operationCount).toBe(3); // Three operations in mock content
    });

    test("sorts log files by modification time (newest first)", async () => {
      const mockFiles = ["old-log.md", "new-log.md"];
      const oldStats = {
        size: 512,
        birthtime: new Date("2025-08-05T20:00:00Z"),
        mtime: new Date("2025-08-05T20:30:00Z"),
      };
      const newStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      // Stats are returned in readdir order: old first, then new.
      fs.stat.mockResolvedValueOnce(oldStats).mockResolvedValueOnce(newStats);
      fs.readFile.mockResolvedValue(
        "## Test Operation - 2025-08-06 20:00:00 UTC"
      );

      const logFiles = await logService.getLogFiles();

      expect(logFiles[0].filename).toBe("new-log.md");
      expect(logFiles[1].filename).toBe("old-log.md");
    });

    test("handles directory read errors", async () => {
      fs.readdir.mockRejectedValue(new Error("Permission denied"));

      // Discovery wraps the underlying fs error with context.
      await expect(logService.getLogFiles()).rejects.toThrow(
        "Failed to discover log files: Permission denied"
      );
    });

    test("skips files that cannot be read", async () => {
      const mockFiles = ["Progress.md", "corrupted-log.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      // First file reads fine; the second read fails and should be skipped.
      fs.readFile
        .mockResolvedValueOnce(mockLogContent)
        .mockRejectedValueOnce(new Error("File corrupted"));

      const logFiles = await logService.getLogFiles();

      expect(logFiles).toHaveLength(1);
      expect(logFiles[0].filename).toBe("Progress.md");
    });
  });

  describe("readLogFile()", () => {
    test("reads Progress.md content by default", async () => {
      fs.readFile.mockResolvedValue(mockLogContent);

      const content = await logService.readLogFile();

      expect(fs.readFile).toHaveBeenCalledWith("Progress.md", "utf8");
      expect(content).toBe(mockLogContent);
    });

    test("reads specified log file", async () => {
      const customContent = "# Custom Log Content";
      fs.readFile.mockResolvedValue(customContent);

      const content = await logService.readLogFile("custom-log.md");

      expect(fs.readFile).toHaveBeenCalledWith("custom-log.md", "utf8");
      expect(content).toBe(customContent);
    });

    test("handles absolute file paths", async () => {
      const absolutePath = "/absolute/path/to/log.md";
      fs.readFile.mockResolvedValue(mockLogContent);

      await logService.readLogFile(absolutePath);

      // Absolute paths are passed through unchanged.
      expect(fs.readFile).toHaveBeenCalledWith(absolutePath, "utf8");
    });

    test("throws error when file not found", async () => {
      // ENOENT is translated into a specific "not found" message.
      const error = new Error("File not found");
      error.code = "ENOENT";
      fs.readFile.mockRejectedValue(error);

      await expect(logService.readLogFile("nonexistent.md")).rejects.toThrow(
        "Log file not found: nonexistent.md"
      );
    });

    test("throws error for other file system errors", async () => {
      // Non-ENOENT errors get the generic wrapped message.
      const error = new Error("Permission denied");
      error.code = "EACCES";
      fs.readFile.mockRejectedValue(error);

      await expect(logService.readLogFile()).rejects.toThrow(
        "Failed to read log file: Permission denied"
      );
    });
  });

  describe("parseLogContent()", () => {
    test("parses log content into structured entries", () => {
      const entries = logService.parseLogContent(mockLogContent);

      // One entry per "## ... Operation - <timestamp>" header in the fixture.
      expect(entries).toHaveLength(3);
      expect(entries.every((entry) => entry.id)).toBe(true);
      expect(entries.every((entry) => entry.timestamp instanceof Date)).toBe(
        true
      );
    });

    test("identifies operation types correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      const rollbackOp = entries.find((e) => e.type === "rollback");
      const scheduledOp = entries.find((e) => e.type === "scheduled");

      expect(updateOp).toBeDefined();
      expect(rollbackOp).toBeDefined();
      expect(scheduledOp).toBeDefined();
    });

    test("parses configuration sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      // "**Configuration:**" bullet lines become key/value pairs.
      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.configuration["Target Tag"]).toBe("Collection-Snowboard");
      expect(updateOp.configuration["Price Adjustment"]).toBe("-10%");
      expect(updateOp.details).toContain("Target Tag: Collection-Snowboard");
    });

    test("parses progress sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.progress).toHaveLength(2); // One success, one failure

      // ✅ bullets map to "success", ❌ bullets map to "failed".
      const successProgress = updateOp.progress.find(
        (p) => p.status === "success"
      );
      const failedProgress = updateOp.progress.find(
        (p) => p.status === "failed"
      );

      expect(successProgress.productTitle).toBe(
        "The Collection Snowboard: Hydrogen"
      );
      expect(failedProgress.productTitle).toBe("Failed Product");
    });

    test("parses summary sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      // Summary values are kept as strings, not parsed to numbers.
      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.summary["Total Products Processed"]).toBe("2");
      expect(updateOp.summary["Successful Updates"]).toBe("1");
      expect(updateOp.summary["Failed Updates"]).toBe("1");
    });

    test("determines operation status correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      const rollbackOp = entries.find((e) => e.type === "rollback");
      const scheduledOp = entries.find((e) => e.type === "scheduled");

      // NOTE(review): the update operation contains a failed item, yet every
      // operation with a summary section is asserted "completed" — confirm
      // this is the intended status semantics (completed != error-free).
      expect(updateOp.status).toBe("completed"); // Has summary
      expect(rollbackOp.status).toBe("completed"); // Has summary, no errors
      expect(scheduledOp.status).toBe("completed"); // Has summary
    });

    test("sorts entries by timestamp (newest first)", () => {
      const entries = logService.parseLogContent(mockLogContent);

      // Scheduled (21:00:00) should come first, then Rollback (20:31:06), then Update (20:30:39)
      expect(entries[0].type).toBe("scheduled");
      expect(entries[1].type).toBe("rollback");
      expect(entries[2].type).toBe("update");
    });

    test("handles malformed content gracefully", () => {
      // Header lacks the "Operation - <timestamp>" shape, so nothing parses.
      const malformedContent = `
# Invalid Log
Random text without proper structure
## Invalid header without timestamp
- Some random line
`;

      const entries = logService.parseLogContent(malformedContent);

      expect(Array.isArray(entries)).toBe(true);
      expect(entries).toHaveLength(0); // No valid operations found
    });

    test("handles invalid timestamps gracefully", () => {
      // Month 13 / day 45 / 99:99:99 is not a real date; the entry should
      // still be produced with some Date instance as its timestamp.
      const invalidTimestampContent = `
## Price Update Operation - 2025-13-45 99:99:99 UTC

**Configuration:**
- Target Tag: test

**Summary:**
- Total Products Processed: 0
`;

      const entries = logService.parseLogContent(invalidTimestampContent);

      expect(entries).toHaveLength(1);
      expect(entries[0].timestamp).toBeInstanceOf(Date);
    });
  });

  describe("filterLogs()", () => {
    let sampleEntries;

    beforeEach(() => {
      // Re-parse the shared fixture for each filtering test.
      sampleEntries = logService.parseLogContent(mockLogContent);
    });

    test("filters by date range - today", () => {
      // Inject one entry stamped "now"; only it should survive the filter
      // (the fixture entries are all dated 2025-08-06).
      const today = new Date();
      const todayEntry = {
        ...sampleEntries[0],
        timestamp: today,
      };
      const testEntries = [todayEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "today",
      });

      expect(filtered).toHaveLength(1);
      expect(filtered[0].timestamp.toDateString()).toBe(today.toDateString());
    });

    test("filters by date range - yesterday", () => {
      const yesterday = new Date();
      yesterday.setDate(yesterday.getDate() - 1);
      const yesterdayEntry = {
        ...sampleEntries[0],
        timestamp: yesterday,
      };
      const testEntries = [yesterdayEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "yesterday",
      });

      expect(filtered).toHaveLength(1);
      expect(filtered[0].timestamp.toDateString()).toBe(
        yesterday.toDateString()
      );
    });

    test("filters by date range - week", () => {
      const weekAgo = new Date();
      weekAgo.setDate(weekAgo.getDate() - 3); // 3 days ago, within week
      const weekEntry = {
        ...sampleEntries[0],
        timestamp: weekAgo,
      };
      const testEntries = [weekEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "week",
      });

      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by operation type", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        operationType: "update",
      });

      expect(filtered.every((entry) => entry.type === "update")).toBe(true);
      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by status", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        status: "completed",
      });

      expect(filtered.every((entry) => entry.status === "completed")).toBe(
        true
      );
      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by search term in title", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "Rollback",
      });

      expect(filtered.length).toBeGreaterThan(0);
      expect(filtered.some((entry) => entry.title.includes("Rollback"))).toBe(
        true
      );
    });

    test("filters by search term in configuration", () => {
      // The term appears only in configuration values, not entry titles.
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "Collection-Snowboard",
      });

      expect(filtered.length).toBeGreaterThan(0);
    });

    test("combines multiple filters", () => {
      // All filters are ANDed together.
      const filtered = logService.filterLogs(sampleEntries, {
        operationType: "update",
        status: "completed",
        searchTerm: "Collection",
      });

      expect(filtered.every((entry) => entry.type === "update")).toBe(true);
      expect(filtered.every((entry) => entry.status === "completed")).toBe(
        true
      );
    });

    test("returns empty array for non-matching filters", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "nonexistent-term-xyz",
      });

      expect(filtered).toHaveLength(0);
    });

    test("returns all entries when no filters applied", () => {
      const filtered = logService.filterLogs(sampleEntries, {});

      expect(filtered).toHaveLength(sampleEntries.length);
    });
  });

  describe("paginateLogs()", () => {
    let sampleEntries;

    beforeEach(() => {
      sampleEntries = logService.parseLogContent(mockLogContent);
    });

    test("paginates logs correctly - first page", () => {
      // Page numbers are 0-based; start/end indices in the metadata are
      // 1-based (display-oriented).
      const result = logService.paginateLogs(sampleEntries, 0, 2);

      expect(result.entries).toHaveLength(2);
      expect(result.pagination.currentPage).toBe(0);
      expect(result.pagination.pageSize).toBe(2);
      expect(result.pagination.totalEntries).toBe(sampleEntries.length);
      expect(result.pagination.totalPages).toBe(
        Math.ceil(sampleEntries.length / 2)
      );
      expect(result.pagination.hasNextPage).toBe(true);
      expect(result.pagination.hasPreviousPage).toBe(false);
      expect(result.pagination.startIndex).toBe(1);
      expect(result.pagination.endIndex).toBe(2);
    });

    test("paginates logs correctly - last page", () => {
      const totalPages = Math.ceil(sampleEntries.length / 2);
      const lastPage = totalPages - 1;
      const result = logService.paginateLogs(sampleEntries, lastPage, 2);

      expect(result.pagination.currentPage).toBe(lastPage);
      expect(result.pagination.hasNextPage).toBe(false);
      expect(result.pagination.hasPreviousPage).toBe(true);
    });

    test("handles empty log array", () => {
      const result = logService.paginateLogs([], 0, 10);

      expect(result.entries).toHaveLength(0);
      expect(result.pagination.totalEntries).toBe(0);
      expect(result.pagination.totalPages).toBe(0);
      expect(result.pagination.hasNextPage).toBe(false);
      expect(result.pagination.hasPreviousPage).toBe(false);
    });

    test("uses default pagination parameters", () => {
      // Defaults: page 0, page size 20.
      const result = logService.paginateLogs(sampleEntries);

      expect(result.pagination.currentPage).toBe(0);
      expect(result.pagination.pageSize).toBe(20);
    });

    test("handles page size larger than total entries", () => {
      const result = logService.paginateLogs(sampleEntries, 0, 100);

      expect(result.entries).toHaveLength(sampleEntries.length);
      expect(result.pagination.totalPages).toBe(1);
      expect(result.pagination.hasNextPage).toBe(false);
    });

    test("calculates pagination metadata correctly", () => {
      // Page 1 with size 1 → second entry only; 1-based indices both 2.
      const result = logService.paginateLogs(sampleEntries, 1, 1);

      expect(result.pagination.startIndex).toBe(2);
      expect(result.pagination.endIndex).toBe(2);
      expect(result.pagination.currentPage).toBe(1);
    });
  });

  describe("Private Methods", () => {
    test("_parseOperationType identifies operation types correctly", () => {
      expect(logService._parseOperationType("Price Update Operation")).toBe(
        "update"
      );
      expect(logService._parseOperationType("Price Rollback Operation")).toBe(
        "rollback"
      );
      expect(logService._parseOperationType("Scheduled Update Operation")).toBe(
        "scheduled"
      );
      // Unrecognized headers fall back to "unknown".
      expect(logService._parseOperationType("Unknown Operation")).toBe(
        "unknown"
      );
    });

    test("_parseTimestamp handles various timestamp formats", () => {
      const timestamp1 = logService._parseTimestamp("2025-08-06 20:30:39 UTC");
      const timestamp2 = logService._parseTimestamp("invalid-timestamp");

      // "<date> <time> UTC" parses to the equivalent ISO instant; invalid
      // input still yields a Date instance rather than throwing.
      expect(timestamp1).toEqual(new Date("2025-08-06T20:30:39Z"));
      expect(timestamp2).toBeInstanceOf(Date);
    });
  });

  describe("Error Handling", () => {
    test("handles empty log content", () => {
      const entries = logService.parseLogContent("");

      expect(entries).toEqual([]);
    });

    test("handles log content with only headers", () => {
      const headerOnlyContent = `
# Shopify Price Update Progress Log
## Recent Operations
---
`;

      const entries = logService.parseLogContent(headerOnlyContent);

      expect(entries).toEqual([]);
    });

    test("handles partial operation entries", () => {
      // An operation with a configuration but no summary/progress should
      // still parse into an entry.
      const partialContent = `
## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: test

# End of file
`;

      const entries = logService.parseLogContent(partialContent);

      expect(entries).toHaveLength(1);
      expect(entries[0].configuration["Target Tag"]).toBe("test");
    });
  });
});
|
||||
692
tests/services/TagAnalysisService.test.js
Normal file
692
tests/services/TagAnalysisService.test.js
Normal file
@@ -0,0 +1,692 @@
|
||||
const TagAnalysisService = require("../../src/services/TagAnalysisService");
const ShopifyService = require("../../src/services/shopify");

// Auto-mock the ShopifyService module; jest hoists this call above the
// requires, and the suite's beforeEach installs a manual constructor
// implementation via ShopifyService.mockImplementation.
jest.mock("../../src/services/shopify");
|
||||
|
||||
describe("TagAnalysisService", () => {
|
||||
let tagAnalysisService;
|
||||
let mockShopifyService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear all mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Create mock ShopifyService instance
|
||||
mockShopifyService = {
|
||||
executeWithRetry: jest.fn(),
|
||||
executeQuery: jest.fn(),
|
||||
};
|
||||
|
||||
// Mock the ShopifyService constructor
|
||||
ShopifyService.mockImplementation(() => mockShopifyService);
|
||||
|
||||
tagAnalysisService = new TagAnalysisService();
|
||||
});
|
||||
|
||||
describe("constructor", () => {
|
||||
it("should initialize with ShopifyService", () => {
|
||||
expect(ShopifyService).toHaveBeenCalledTimes(1);
|
||||
expect(tagAnalysisService.pageSize).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe("fetchAllTags", () => {
|
||||
it("should fetch all tags successfully with single page", async () => {
|
||||
const mockResponse = {
|
||||
products: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "product1",
|
||||
title: "Product 1",
|
||||
tags: ["tag1", "tag2"],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant1",
|
||||
price: "10.00",
|
||||
title: "Variant 1",
|
||||
},
|
||||
},
|
||||
{
|
||||
node: {
|
||||
id: "variant2",
|
||||
price: "20.00",
|
||||
title: "Variant 2",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
node: {
|
||||
id: "product2",
|
||||
title: "Product 2",
|
||||
tags: ["tag1", "tag3"],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant3",
|
||||
price: "15.00",
|
||||
title: "Variant 3",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await tagAnalysisService.fetchAllTags();
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].tag).toBe("tag1");
|
||||
expect(result[0].productCount).toBe(2);
|
||||
expect(result[0].variantCount).toBe(3);
|
||||
expect(result[0].totalValue).toBe(45); // 10 + 20 + 15
|
||||
expect(result[0].averagePrice).toBe(15);
|
||||
|
||||
expect(result[1].tag).toBe("tag2");
|
||||
expect(result[1].productCount).toBe(1);
|
||||
expect(result[1].variantCount).toBe(2);
|
||||
expect(result[1].totalValue).toBe(30); // 10 + 20
|
||||
|
||||
expect(result[2].tag).toBe("tag3");
|
||||
expect(result[2].productCount).toBe(1);
|
||||
expect(result[2].variantCount).toBe(1);
|
||||
expect(result[2].totalValue).toBe(15);
|
||||
});
|
||||
|
||||
it("should handle multiple pages", async () => {
|
||||
const mockResponse1 = {
|
||||
products: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "product1",
|
||||
title: "Product 1",
|
||||
tags: ["tag1"],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant1",
|
||||
price: "10.00",
|
||||
title: "Variant 1",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: true,
|
||||
endCursor: "cursor1",
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockResponse2 = {
|
||||
products: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "product2",
|
||||
title: "Product 2",
|
||||
tags: ["tag2"],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant2",
|
||||
price: "20.00",
|
||||
title: "Variant 2",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry
|
||||
.mockResolvedValueOnce(mockResponse1)
|
||||
.mockResolvedValueOnce(mockResponse2);
|
||||
|
||||
const result = await tagAnalysisService.fetchAllTags();
|
||||
|
||||
expect(mockShopifyService.executeWithRetry).toHaveBeenCalledTimes(2);
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].tag).toBe("tag1");
|
||||
expect(result[1].tag).toBe("tag2");
|
||||
});
|
||||
|
||||
it("should handle products with no tags", async () => {
|
||||
const mockResponse = {
|
||||
products: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "product1",
|
||||
title: "Product 1",
|
||||
tags: [],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant1",
|
||||
price: "10.00",
|
||||
title: "Variant 1",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
node: {
|
||||
id: "product2",
|
||||
title: "Product 2",
|
||||
tags: null,
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant2",
|
||||
price: "20.00",
|
||||
title: "Variant 2",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await tagAnalysisService.fetchAllTags();
|
||||
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should handle API errors", async () => {
|
||||
const mockError = new Error("API connection failed");
|
||||
mockShopifyService.executeWithRetry.mockRejectedValue(mockError);
|
||||
|
||||
await expect(tagAnalysisService.fetchAllTags()).rejects.toThrow(
|
||||
"Tag fetching failed: API connection failed"
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle invalid response structure", async () => {
|
||||
const mockResponse = {
|
||||
// Missing products field
|
||||
data: {},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry.mockResolvedValue(mockResponse);
|
||||
|
||||
await expect(tagAnalysisService.fetchAllTags()).rejects.toThrow(
|
||||
"Invalid response structure: missing products field"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTagDetails", () => {
|
||||
it("should get detailed tag information", async () => {
|
||||
const mockResponse = {
|
||||
products: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "product1",
|
||||
title: "Product 1",
|
||||
tags: ["test-tag", "other-tag"],
|
||||
variants: {
|
||||
edges: [
|
||||
{
|
||||
node: {
|
||||
id: "variant1",
|
||||
price: "10.00",
|
||||
compareAtPrice: "12.00",
|
||||
title: "Variant 1",
|
||||
},
|
||||
},
|
||||
{
|
||||
node: {
|
||||
id: "variant2",
|
||||
price: "20.00",
|
||||
compareAtPrice: null,
|
||||
title: "Variant 2",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await tagAnalysisService.getTagDetails("test-tag");
|
||||
|
||||
expect(result.tag).toBe("test-tag");
|
||||
expect(result.productCount).toBe(1);
|
||||
expect(result.variantCount).toBe(2);
|
||||
expect(result.totalValue).toBe(30);
|
||||
expect(result.averagePrice).toBe(15);
|
||||
expect(result.priceRange.min).toBe(10);
|
||||
expect(result.priceRange.max).toBe(20);
|
||||
expect(result.products).toHaveLength(1);
|
||||
expect(result.products[0].title).toBe("Product 1");
|
||||
expect(result.products[0].variants).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should handle tag with 'tag:' prefix", async () => {
|
||||
const mockResponse = {
|
||||
products: {
|
||||
edges: [],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
endCursor: null,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockShopifyService.executeWithRetry.mockResolvedValue(mockResponse);
|
||||
|
||||
await tagAnalysisService.getTagDetails("tag:test-tag");
|
||||
|
||||
expect(mockShopifyService.executeWithRetry).toHaveBeenCalledWith(
|
||||
expect.any(Function)
|
||||
);
|
||||
|
||||
// Verify the query was called with the correct tag format
|
||||
const callArgs = mockShopifyService.executeWithRetry.mock.calls[0];
|
||||
const queryFunction = callArgs[0];
|
||||
|
||||
// Mock the executeQuery to capture the variables
|
||||
mockShopifyService.executeQuery.mockResolvedValue(mockResponse);
|
||||
await queryFunction();
|
||||
|
||||
expect(mockShopifyService.executeQuery).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
query: "tag:test-tag",
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
    it("should handle multiple pages for tag details", async () => {
      // First page: one product/variant with hasNextPage=true and a cursor,
      // which forces getTagDetails to request a second page.
      const mockResponse1 = {
        products: {
          edges: [
            {
              node: {
                id: "product1",
                title: "Product 1",
                tags: ["test-tag"],
                variants: {
                  edges: [
                    {
                      node: {
                        id: "variant1",
                        price: "10.00",
                        compareAtPrice: null,
                        title: "Variant 1",
                      },
                    },
                  ],
                },
              },
            },
          ],
          pageInfo: {
            hasNextPage: true,
            endCursor: "cursor1",
          },
        },
      };

      // Second (final) page: another product/variant, no further pages.
      const mockResponse2 = {
        products: {
          edges: [
            {
              node: {
                id: "product2",
                title: "Product 2",
                tags: ["test-tag"],
                variants: {
                  edges: [
                    {
                      node: {
                        id: "variant2",
                        price: "20.00",
                        compareAtPrice: null,
                        title: "Variant 2",
                      },
                    },
                  ],
                },
              },
            },
          ],
          pageInfo: {
            hasNextPage: false,
            endCursor: null,
          },
        },
      };

      mockShopifyService.executeWithRetry
        .mockResolvedValueOnce(mockResponse1)
        .mockResolvedValueOnce(mockResponse2);

      const result = await tagAnalysisService.getTagDetails("test-tag");

      // Both pages were fetched and their products merged into one result.
      expect(mockShopifyService.executeWithRetry).toHaveBeenCalledTimes(2);
      expect(result.products).toHaveLength(2);
      expect(result.productCount).toBe(2);
      expect(result.variantCount).toBe(2);
    });
|
||||
|
||||
it("should handle API errors in getTagDetails", async () => {
|
||||
const mockError = new Error("Network error");
|
||||
mockShopifyService.executeWithRetry.mockRejectedValue(mockError);
|
||||
|
||||
await expect(
|
||||
tagAnalysisService.getTagDetails("test-tag")
|
||||
).rejects.toThrow("Tag analysis failed: Network error");
|
||||
});
|
||||
});
|
||||
|
||||
  // Pure-function tests for calculateTagStatistics: aggregates product/
  // variant counts, total value, average price, and price range.
  describe("calculateTagStatistics", () => {
    it("should calculate statistics correctly", () => {
      const products = [
        {
          id: "product1",
          title: "Product 1",
          variants: [
            { id: "variant1", price: 10, title: "Variant 1" },
            { id: "variant2", price: 20, title: "Variant 2" },
          ],
        },
        {
          id: "product2",
          title: "Product 2",
          variants: [{ id: "variant3", price: 15, title: "Variant 3" }],
        },
      ];

      const result = tagAnalysisService.calculateTagStatistics(products);

      expect(result.productCount).toBe(2);
      expect(result.variantCount).toBe(3);
      expect(result.totalValue).toBe(45); // 10 + 20 + 15
      expect(result.averagePrice).toBe(15); // 45 / 3
      expect(result.priceRange.min).toBe(10);
      expect(result.priceRange.max).toBe(20);
    });

    it("should handle empty products array", () => {
      // Empty input yields all-zero statistics rather than NaN/undefined.
      const result = tagAnalysisService.calculateTagStatistics([]);

      expect(result.productCount).toBe(0);
      expect(result.variantCount).toBe(0);
      expect(result.totalValue).toBe(0);
      expect(result.averagePrice).toBe(0);
      expect(result.priceRange.min).toBe(0);
      expect(result.priceRange.max).toBe(0);
    });

    it("should handle null/undefined products", () => {
      const result1 = tagAnalysisService.calculateTagStatistics(null);
      const result2 = tagAnalysisService.calculateTagStatistics(undefined);

      expect(result1.productCount).toBe(0);
      expect(result2.productCount).toBe(0);
    });

    it("should handle products with invalid prices", () => {
      // NaN and non-numeric prices are excluded from every aggregate.
      const products = [
        {
          id: "product1",
          title: "Product 1",
          variants: [
            { id: "variant1", price: 10, title: "Variant 1" },
            { id: "variant2", price: NaN, title: "Variant 2" },
            { id: "variant3", price: "invalid", title: "Variant 3" },
          ],
        },
      ];

      const result = tagAnalysisService.calculateTagStatistics(products);

      expect(result.productCount).toBe(1);
      expect(result.variantCount).toBe(1); // Only valid price counted
      expect(result.totalValue).toBe(10);
      expect(result.averagePrice).toBe(10);
    });

    it("should handle products with no variants", () => {
      // Both an empty variants array and a null variants field are
      // tolerated; the products still count toward productCount.
      const products = [
        {
          id: "product1",
          title: "Product 1",
          variants: [],
        },
        {
          id: "product2",
          title: "Product 2",
          variants: null,
        },
      ];

      const result = tagAnalysisService.calculateTagStatistics(products);

      expect(result.productCount).toBe(2);
      expect(result.variantCount).toBe(0);
      expect(result.totalValue).toBe(0);
      expect(result.averagePrice).toBe(0);
    });
  });
|
||||
|
||||
  describe("searchTags", () => {
    // Fixture covering both match paths: by tag name ("summer-sale") and
    // by product title ("Summer Sunglasses" under "accessories").
    const mockTags = [
      {
        tag: "summer-sale",
        productCount: 5,
        products: [
          { id: "1", title: "Summer Dress", variantCount: 2 },
          { id: "2", title: "Beach Hat", variantCount: 1 },
        ],
      },
      {
        tag: "winter-collection",
        productCount: 3,
        products: [{ id: "3", title: "Winter Coat", variantCount: 3 }],
      },
      {
        tag: "accessories",
        productCount: 8,
        products: [{ id: "4", title: "Summer Sunglasses", variantCount: 1 }],
      },
    ];

    it("should return all tags when query is empty", () => {
      // Empty, whitespace-only, null and undefined queries all mean
      // "no filter".
      const result1 = tagAnalysisService.searchTags(mockTags, "");
      const result2 = tagAnalysisService.searchTags(mockTags, " ");
      const result3 = tagAnalysisService.searchTags(mockTags, null);
      const result4 = tagAnalysisService.searchTags(mockTags, undefined);

      expect(result1).toEqual(mockTags);
      expect(result2).toEqual(mockTags);
      expect(result3).toEqual(mockTags);
      expect(result4).toEqual(mockTags);
    });

    it("should filter tags by tag name", () => {
      const result = tagAnalysisService.searchTags(mockTags, "summer-sale");

      expect(result).toHaveLength(1);
      expect(result[0].tag).toBe("summer-sale");
    });

    it("should filter tags by product title", () => {
      // "coat" only appears in a product title, not in any tag name.
      const result = tagAnalysisService.searchTags(mockTags, "coat");

      expect(result).toHaveLength(1);
      expect(result[0].tag).toBe("winter-collection");
    });

    it("should be case insensitive", () => {
      const result1 = tagAnalysisService.searchTags(mockTags, "SUMMER-SALE");
      const result2 = tagAnalysisService.searchTags(mockTags, "Winter");

      expect(result1).toHaveLength(1);
      expect(result1[0].tag).toBe("summer-sale");
      expect(result2).toHaveLength(1);
      expect(result2[0].tag).toBe("winter-collection");
    });

    it("should return multiple matches", () => {
      const result = tagAnalysisService.searchTags(mockTags, "summer");

      // Should match both "summer-sale" tag and "Summer Sunglasses" product
      expect(result).toHaveLength(2);
      expect(result.map((t) => t.tag)).toContain("summer-sale");
      expect(result.map((t) => t.tag)).toContain("accessories");
    });

    it("should return empty array when no matches found", () => {
      const result = tagAnalysisService.searchTags(mockTags, "nonexistent");

      expect(result).toHaveLength(0);
    });

    it("should handle tags without products array", () => {
      // A tag entry may omit its products list; name matching must still
      // work without throwing.
      const tagsWithoutProducts = [
        {
          tag: "test-tag",
          productCount: 1,
          // No products array
        },
      ];

      const result = tagAnalysisService.searchTags(tagsWithoutProducts, "test");

      expect(result).toHaveLength(1);
      expect(result[0].tag).toBe("test-tag");
    });
  });
|
||||
|
||||
  // Aggregation across an already-analyzed tag list: totals plus
  // per-tag averages.
  describe("getTagAnalysisSummary", () => {
    it("should calculate summary statistics correctly", () => {
      const tags = [
        {
          tag: "tag1",
          productCount: 5,
          variantCount: 10,
          totalValue: 100,
        },
        {
          tag: "tag2",
          productCount: 3,
          variantCount: 6,
          totalValue: 60,
        },
        {
          tag: "tag3",
          productCount: 2,
          variantCount: 4,
          totalValue: 40,
        },
      ];

      const result = tagAnalysisService.getTagAnalysisSummary(tags);

      expect(result.totalTags).toBe(3);
      expect(result.totalProducts).toBe(10);
      expect(result.totalVariants).toBe(20);
      expect(result.totalValue).toBe(200);
      // Averages are left unrounded; compare against the exact quotient.
      expect(result.averageProductsPerTag).toBe(10 / 3);
      expect(result.averageVariantsPerTag).toBe(20 / 3);
    });

    it("should handle empty tags array", () => {
      // No tags: all totals and averages are zero (no division by zero).
      const result = tagAnalysisService.getTagAnalysisSummary([]);

      expect(result.totalTags).toBe(0);
      expect(result.totalProducts).toBe(0);
      expect(result.totalVariants).toBe(0);
      expect(result.totalValue).toBe(0);
      expect(result.averageProductsPerTag).toBe(0);
      expect(result.averageVariantsPerTag).toBe(0);
    });

    it("should handle null/undefined tags", () => {
      const result1 = tagAnalysisService.getTagAnalysisSummary(null);
      const result2 = tagAnalysisService.getTagAnalysisSummary(undefined);

      expect(result1.totalTags).toBe(0);
      expect(result2.totalTags).toBe(0);
    });
  });
|
||||
|
||||
describe("GraphQL queries", () => {
|
||||
it("should have correct getAllProductsWithTagsQuery structure", () => {
|
||||
const query = tagAnalysisService.getAllProductsWithTagsQuery();
|
||||
|
||||
expect(query).toContain("query getAllProductsWithTags");
|
||||
expect(query).toContain("products(first: $first, after: $after)");
|
||||
expect(query).toContain("tags");
|
||||
expect(query).toContain("variants");
|
||||
expect(query).toContain("pageInfo");
|
||||
});
|
||||
|
||||
it("should have correct getProductsByTagQuery structure", () => {
|
||||
const query = tagAnalysisService.getProductsByTagQuery();
|
||||
|
||||
expect(query).toContain("query getProductsByTag");
|
||||
expect(query).toContain(
|
||||
"products(first: $first, after: $after, query: $query)"
|
||||
);
|
||||
expect(query).toContain("tags");
|
||||
expect(query).toContain("variants");
|
||||
expect(query).toContain("compareAtPrice");
|
||||
expect(query).toContain("pageInfo");
|
||||
});
|
||||
});
|
||||
});
|
||||
428
tests/services/logReader.test.js
Normal file
428
tests/services/logReader.test.js
Normal file
@@ -0,0 +1,428 @@
|
||||
const fs = require("fs").promises;
|
||||
const LogReaderService = require("../../src/services/logReader");
|
||||
|
||||
// Mock the fs module so no real filesystem I/O happens in this suite.
// Only the surface LogReaderService touches is stubbed: the promise-based
// stat/readFile/access, plus the callback-style watchFile/unwatchFile.
jest.mock("fs", () => ({
  promises: {
    stat: jest.fn(),
    readFile: jest.fn(),
    access: jest.fn(),
  },
  watchFile: jest.fn(),
  unwatchFile: jest.fn(),
}));
|
||||
|
||||
describe("LogReaderService", () => {
|
||||
let logReader;
|
||||
let mockLogContent;
|
||||
|
||||
  beforeEach(() => {
    jest.clearAllMocks();
    logReader = new LogReaderService("test-progress.md");

    // Mock log content: one update operation (one success, one failure),
    // one rollback operation, and a trailing error-analysis section.
    // NOTE(review): this fixture was reconstructed from a mangled extract;
    // blank-line placement and sub-bullet indentation inside the string
    // should be confirmed against the original file.
    mockLogContent = `# Shopify Price Update Progress Log

This file tracks the progress of price update operations.

## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Price Adjustment: -10%
- Started: 2025-08-06 20:30:39 UTC

**Progress:**
- ✅ **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $600 → $540
- Compare At Price: $600
- Updated: 2025-08-06 20:30:40 UTC
- ❌ **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Error: Rate limit exceeded
- Failed: 2025-08-06 20:30:41 UTC

**Summary:**
- Total Products Processed: 2
- Successful Updates: 1
- Failed Updates: 1
- Duration: 2 seconds
- Completed: 2025-08-06 20:30:42 UTC

---

## Price Rollback Operation - 2025-08-06 20:31:06 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Operation Mode: rollback
- Started: 2025-08-06 20:31:06 UTC

**Progress:**
- 🔄 **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $540 → $600 (from Compare At: $600)
- Rolled back: 2025-08-06 20:31:07 UTC

**Rollback Summary:**
- Total Products Processed: 1
- Total Variants Processed: 1
- Eligible Variants: 1
- Successful Rollbacks: 1
- Failed Rollbacks: 0
- Skipped Variants: 0 (no compare-at price)
- Duration: 1 seconds
- Completed: 2025-08-06 20:31:07 UTC

---

**Error Analysis - 2025-08-06 20:31:10 UTC**

**Error Summary by Category:**
- Rate Limiting: 1 error

**Detailed Error Log:**
1. **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Category: Rate Limiting
- Error: Rate limit exceeded (429)
`;
  });
|
||||
|
||||
  // Reading behavior: parse success, missing file, hard I/O errors, and
  // the mtime-based cache.
  describe("File Reading", () => {
    test("reads and parses log entries successfully", async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const entries = await logReader.readLogEntries();

      expect(fs.stat).toHaveBeenCalledWith("test-progress.md");
      expect(fs.readFile).toHaveBeenCalledWith("test-progress.md", "utf8");
      expect(entries).toHaveLength(2); // Two main operations
      expect(entries[0].type).toBe("rollback"); // Newest first
      expect(entries[1].type).toBe("update");
    });

    test("returns empty array when file doesn't exist", async () => {
      // ENOENT is treated as "no log yet", not as a failure.
      const error = new Error("File not found");
      error.code = "ENOENT";
      fs.stat.mockRejectedValue(error);

      const entries = await logReader.readLogEntries();

      expect(entries).toEqual([]);
    });

    test("throws error for other file system errors", async () => {
      // Anything other than ENOENT (e.g. EACCES) must propagate.
      const error = new Error("Permission denied");
      error.code = "EACCES";
      fs.stat.mockRejectedValue(error);

      await expect(logReader.readLogEntries()).rejects.toThrow(
        "Permission denied"
      );
    });

    test("uses cache when file hasn't changed", async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      // First call
      await logReader.readLogEntries();
      expect(fs.readFile).toHaveBeenCalledTimes(1);

      // Second call with same mtime
      await logReader.readLogEntries();
      expect(fs.readFile).toHaveBeenCalledTimes(1); // Should use cache
    });

    test("refreshes cache when file has changed", async () => {
      const oldStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      const newStats = { mtime: new Date("2025-08-06T20:33:00Z") };

      fs.stat.mockResolvedValueOnce(oldStats).mockResolvedValueOnce(newStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      // First call
      await logReader.readLogEntries();
      expect(fs.readFile).toHaveBeenCalledTimes(1);

      // Second call with different mtime
      await logReader.readLogEntries();
      expect(fs.readFile).toHaveBeenCalledTimes(2); // Should refresh cache
    });
  });
|
||||
|
||||
  // Parsing of the markdown log format: headers, configuration key/values,
  // timestamps, operation types, and sort order.
  describe("Log Parsing", () => {
    beforeEach(async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);
    });

    test("parses operation headers correctly", async () => {
      const entries = await logReader.readLogEntries();

      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.title).toBe(
        "Price Update Operation - 2025-08-06 20:30:39 UTC"
      );
      expect(updateOp.level).toBe("INFO");
      expect(updateOp.message).toBe(
        "Started: Price Update Operation - 2025-08-06 20:30:39 UTC"
      );

      const rollbackOp = entries.find((e) => e.type === "rollback");
      expect(rollbackOp.title).toBe(
        "Price Rollback Operation - 2025-08-06 20:31:06 UTC"
      );
    });

    test("parses configuration sections correctly", async () => {
      const entries = await logReader.readLogEntries();

      // "**Configuration:**" bullets become key/value pairs and are also
      // kept verbatim in the details text.
      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.configuration["Target Tag"]).toBe("Collection-Snowboard");
      expect(updateOp.configuration["Price Adjustment"]).toBe("-10%");
      expect(updateOp.details).toContain("Target Tag: Collection-Snowboard");
    });

    test("parses timestamps correctly", async () => {
      const entries = await logReader.readLogEntries();

      // Raw "YYYY-MM-DD HH:MM:SS UTC" text is kept alongside the parsed Date.
      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.timestamp).toEqual(new Date("2025-08-06T20:30:39Z"));
      expect(updateOp.rawTimestamp).toBe("2025-08-06 20:30:39 UTC");
    });

    test("identifies operation types correctly", async () => {
      const entries = await logReader.readLogEntries();

      expect(entries.some((e) => e.type === "update")).toBe(true);
      expect(entries.some((e) => e.type === "rollback")).toBe(true);
    });

    test("sorts entries by timestamp (newest first)", async () => {
      const entries = await logReader.readLogEntries();

      // Rollback operation (2025-08-06 20:31:06) should come before update (2025-08-06 20:30:39)
      expect(entries[0].type).toBe("rollback");
      expect(entries[1].type).toBe("update");
    });
  });
|
||||
|
||||
  // getPaginatedEntries: zero-based pages, metadata flags, and defaults.
  describe("Pagination", () => {
    beforeEach(async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);
    });

    test("returns paginated results correctly", async () => {
      // 2 total entries at pageSize 1 -> 2 pages; page 0 has a next page
      // but no previous page.
      const result = await logReader.getPaginatedEntries({
        page: 0,
        pageSize: 1,
        levelFilter: "ALL",
        searchTerm: "",
      });

      expect(result.entries).toHaveLength(1);
      expect(result.pagination.currentPage).toBe(0);
      expect(result.pagination.pageSize).toBe(1);
      expect(result.pagination.totalEntries).toBe(2);
      expect(result.pagination.totalPages).toBe(2);
      expect(result.pagination.hasNextPage).toBe(true);
      expect(result.pagination.hasPreviousPage).toBe(false);
    });

    test("handles second page correctly", async () => {
      const result = await logReader.getPaginatedEntries({
        page: 1,
        pageSize: 1,
        levelFilter: "ALL",
        searchTerm: "",
      });

      expect(result.entries).toHaveLength(1);
      expect(result.pagination.currentPage).toBe(1);
      expect(result.pagination.hasNextPage).toBe(false);
      expect(result.pagination.hasPreviousPage).toBe(true);
    });

    test("uses default pagination options", async () => {
      // Calling with no options falls back to page 0, size 20, no filters.
      const result = await logReader.getPaginatedEntries();

      expect(result.pagination.pageSize).toBe(20);
      expect(result.pagination.currentPage).toBe(0);
      expect(result.filters.levelFilter).toBe("ALL");
      expect(result.filters.searchTerm).toBe("");
    });
  });
|
||||
|
||||
  // Level and free-text filtering applied by getPaginatedEntries.
  describe("Filtering", () => {
    beforeEach(async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);
    });

    test("filters by log level correctly", async () => {
      const result = await logReader.getPaginatedEntries({
        levelFilter: "INFO",
      });

      expect(result.entries.every((e) => e.level === "INFO")).toBe(true);
      expect(result.filters.levelFilter).toBe("INFO");
    });

    test("filters by search term in message", async () => {
      // Search is case-insensitive across message and title.
      const result = await logReader.getPaginatedEntries({
        searchTerm: "rollback",
      });

      expect(result.entries.length).toBeGreaterThan(0);
      expect(
        result.entries.some(
          (e) =>
            e.message.toLowerCase().includes("rollback") ||
            e.title.toLowerCase().includes("rollback")
        )
      ).toBe(true);
    });

    test("filters by search term in details", async () => {
      // The details text (configuration/progress body) is searched too.
      const result = await logReader.getPaginatedEntries({
        searchTerm: "Collection-Snowboard",
      });

      expect(result.entries.length).toBeGreaterThan(0);
      expect(
        result.entries.some((e) => e.details.includes("Collection-Snowboard"))
      ).toBe(true);
    });

    test("returns empty results for non-matching filters", async () => {
      const result = await logReader.getPaginatedEntries({
        searchTerm: "nonexistent-term-xyz",
      });

      expect(result.entries).toHaveLength(0);
      expect(result.pagination.totalEntries).toBe(0);
    });
  });
|
||||
|
||||
  // Aggregate statistics derived from the parsed entries.
  describe("Statistics", () => {
    beforeEach(async () => {
      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);
    });

    test("calculates log statistics correctly", async () => {
      const stats = await logReader.getLogStatistics();

      expect(stats.totalEntries).toBe(2);
      expect(stats.byLevel.INFO).toBe(2);
      expect(stats.byType.update).toBe(1);
      expect(stats.byType.rollback).toBe(1);
      expect(stats.operations.total).toBe(2);
    });

    test("tracks date range correctly", async () => {
      const stats = await logReader.getLogStatistics();

      // Oldest = the update op's start, newest = the rollback op's start.
      expect(stats.dateRange.oldest).toEqual(new Date("2025-08-06T20:30:39Z"));
      expect(stats.dateRange.newest).toEqual(new Date("2025-08-06T20:31:06Z"));
    });
  });
|
||||
|
||||
describe("Cache Management", () => {
|
||||
test("clears cache when requested", async () => {
|
||||
const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
|
||||
fs.stat.mockResolvedValue(mockStats);
|
||||
fs.readFile.mockResolvedValue(mockLogContent);
|
||||
|
||||
// Load data to populate cache
|
||||
await logReader.readLogEntries();
|
||||
expect(fs.readFile).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Clear cache and load again
|
||||
logReader.clearCache();
|
||||
await logReader.readLogEntries();
|
||||
expect(fs.readFile).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("File Watching", () => {
|
||||
test("sets up file watching correctly", () => {
|
||||
const mockCallback = jest.fn();
|
||||
const mockCleanup = jest.fn();
|
||||
|
||||
require("fs").watchFile.mockReturnValue(mockCleanup);
|
||||
|
||||
const cleanup = logReader.watchFile(mockCallback);
|
||||
|
||||
expect(require("fs").watchFile).toHaveBeenCalledWith(
|
||||
"test-progress.md",
|
||||
expect.any(Function)
|
||||
);
|
||||
expect(typeof cleanup).toBe("function");
|
||||
});
|
||||
|
||||
test("returns no-op cleanup function when watching fails", () => {
|
||||
require("fs").watchFile.mockImplementation(() => {
|
||||
throw new Error("Watch failed");
|
||||
});
|
||||
|
||||
const cleanup = logReader.watchFile(() => {});
|
||||
|
||||
expect(typeof cleanup).toBe("function");
|
||||
// Should not throw when called
|
||||
expect(() => cleanup()).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
  // Robustness against malformed input: the reader must degrade, not throw.
  describe("Error Handling", () => {
    test("handles malformed log content gracefully", async () => {
      const malformedContent =
        "This is not a valid log format\nRandom text\n## Invalid header";

      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(malformedContent);

      const entries = await logReader.readLogEntries();

      // Should return empty array or minimal parsed data without throwing
      expect(Array.isArray(entries)).toBe(true);
    });

    test("handles invalid timestamps gracefully", async () => {
      // Header with an unparseable timestamp; the entry should still be
      // produced with some fallback Date.
      const invalidTimestampContent = `## Price Update Operation - invalid-timestamp

**Configuration:**
- Target Tag: test

**Progress:**

**Summary:**
- Total Products Processed: 0
`;

      const mockStats = { mtime: new Date("2025-08-06T20:32:00Z") };
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(invalidTimestampContent);

      const entries = await logReader.readLogEntries();

      expect(entries).toHaveLength(1);
      expect(entries[0].timestamp).toBeInstanceOf(Date);
    });
  });
|
||||
});
|
||||
82
tests/services/scheduleManagement.test.js
Normal file
82
tests/services/scheduleManagement.test.js
Normal file
@@ -0,0 +1,82 @@
|
||||
const ScheduleService = require("../../src/services/scheduleManagement");
|
||||
|
||||
// Smoke tests for ScheduleService: validation, ID generation, and
// next-execution calculation.
describe("ScheduleService", () => {
  let scheduleService;

  beforeEach(() => {
    scheduleService = new ScheduleService();
  });

  test("should validate a valid schedule", () => {
    // A fully-populated, future-dated schedule passes validation (null
    // means "no validation error").
    const validSchedule = {
      operationType: "update",
      scheduledTime: new Date(Date.now() + 86400000),
      recurrence: "daily",
      enabled: true,
      config: { targetTag: "sale" },
      status: "pending",
    };

    const result = scheduleService.validateSchedule(validSchedule);

    expect(result).toBeNull();
  });

  test("should return error for missing operation type", () => {
    const invalidSchedule = {
      scheduledTime: new Date(Date.now() + 86400000),
    };

    const result = scheduleService.validateSchedule(invalidSchedule);

    expect(result).toBe("Operation type is required");
  });

  test("should return error for invalid operation type", () => {
    const invalidSchedule = {
      operationType: "invalid",
      scheduledTime: new Date(Date.now() + 86400000),
    };

    const result = scheduleService.validateSchedule(invalidSchedule);

    expect(result).toBe('Operation type must be "update" or "rollback"');
  });

  test("should generate unique IDs", () => {
    // IDs must match the schedule_<ts>_<suffix> shape, differ between
    // calls, and never collide with existing schedule IDs.
    const existingSchedules = [
      { id: "schedule_123_abc" },
      { id: "schedule_456_def" },
    ];

    const id1 = scheduleService._generateId(existingSchedules);
    const id2 = scheduleService._generateId(existingSchedules);

    expect(id1).toMatch(/^schedule_\d+_[a-z0-9]+$/);
    expect(id2).toMatch(/^schedule_\d+_[a-z0-9]+$/);
    expect(id1).not.toBe(id2);
    expect(existingSchedules.some((s) => s.id === id1)).toBe(false);
    expect(existingSchedules.some((s) => s.id === id2)).toBe(false);
  });

  test("should calculate next execution for daily recurrence", () => {
    const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
    const nextExecution = scheduleService._calculateNextExecution(
      scheduledTime,
      "daily"
    );

    expect(nextExecution).toBeInstanceOf(Date);
    // Compare calendar days in UTC. The previous getDate() comparison was
    // local-timezone dependent: in e.g. UTC-11 the scheduled instant falls
    // on Nov 30 locally, so getDate() + 1 did not match the next run's day
    // and the test failed depending on where it ran.
    expect(nextExecution.getUTCDate()).toBe(scheduledTime.getUTCDate() + 1);
  });

  test("should return null for once recurrence", () => {
    // One-shot schedules have no next execution.
    const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
    const nextExecution = scheduleService._calculateNextExecution(
      scheduledTime,
      "once"
    );

    expect(nextExecution).toBeNull();
  });
});
|
||||
593
tests/services/scheduleService.test.js
Normal file
593
tests/services/scheduleService.test.js
Normal file
@@ -0,0 +1,593 @@
|
||||
/**
|
||||
* Unit tests for ScheduleService (Schedule Management) functionality
|
||||
* Tests Requirements 1.6, 5.1 from the tui-missing-screens spec
|
||||
*/
|
||||
|
||||
const ScheduleService = require("../../src/services/scheduleManagement");
|
||||
const fs = require("fs").promises;
|
||||
const path = require("path");
|
||||
|
||||
describe("ScheduleService", () => {
|
||||
let scheduleService;
|
||||
let testSchedulesFile;
|
||||
|
||||
beforeEach(() => {
|
||||
// Use a unique test file for each test to avoid conflicts
|
||||
testSchedulesFile = `test-schedules-${Date.now()}-${Math.random()
|
||||
.toString(36)
|
||||
.substr(2, 9)}.json`;
|
||||
|
||||
// Create a custom ScheduleService instance that uses our test file
|
||||
scheduleService = new ScheduleService();
|
||||
scheduleService.schedulesFile = path.join(process.cwd(), testSchedulesFile);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up test file after each test
|
||||
try {
|
||||
await fs.unlink(testSchedulesFile);
|
||||
} catch (error) {
|
||||
// File might not exist, that's okay
|
||||
}
|
||||
});
|
||||
|
||||
  // validateSchedule contract: returns null on success, otherwise the
  // exact human-readable error string for the first failing field.
  describe("validateSchedule", () => {
    test("should return null for valid schedule", () => {
      const validSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() + 86400000),
        recurrence: "daily",
        enabled: true,
        config: { targetTag: "sale" },
        status: "pending",
      };

      const result = scheduleService.validateSchedule(validSchedule);

      expect(result).toBeNull();
    });

    test("should return error for missing schedule object", () => {
      const result = scheduleService.validateSchedule(null);

      expect(result).toBe("Schedule object is required");
    });

    test("should return error for missing operation type", () => {
      const invalidSchedule = {
        scheduledTime: new Date(Date.now() + 86400000),
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Operation type is required");
    });

    test("should return error for invalid operation type", () => {
      const invalidSchedule = {
        operationType: "invalid",
        scheduledTime: new Date(Date.now() + 86400000),
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe('Operation type must be "update" or "rollback"');
    });

    test("should return error for missing scheduled time", () => {
      const invalidSchedule = {
        operationType: "update",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Scheduled time is required");
    });

    test("should return error for invalid scheduled time", () => {
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: "invalid date",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Scheduled time must be a valid date");
    });

    test("should return error for past scheduled time on new schedules", () => {
      // New schedules (no id) may not be scheduled in the past.
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() - 86400000), // Yesterday
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Scheduled time must be in the future");
    });

    test("should allow past scheduled time for existing schedules", () => {
      // The presence of an id marks the schedule as already persisted, so
      // the future-time rule is skipped (it may have already run).
      const existingSchedule = {
        id: "existing_schedule",
        operationType: "update",
        scheduledTime: new Date(Date.now() - 86400000), // Yesterday
      };

      const result = scheduleService.validateSchedule(existingSchedule);

      expect(result).toBeNull();
    });

    test("should return error for invalid recurrence", () => {
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() + 86400000),
        recurrence: "invalid",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe(
        "Recurrence must be one of: once, daily, weekly, monthly"
      );
    });

    test("should return error for invalid status", () => {
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() + 86400000),
        status: "invalid",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe(
        "Status must be one of: pending, completed, failed, cancelled"
      );
    });

    test("should return error for invalid enabled flag", () => {
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() + 86400000),
        enabled: "not boolean",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Enabled must be a boolean value");
    });

    test("should return error for invalid config", () => {
      const invalidSchedule = {
        operationType: "update",
        scheduledTime: new Date(Date.now() + 86400000),
        config: "not an object",
      };

      const result = scheduleService.validateSchedule(invalidSchedule);

      expect(result).toBe("Config must be an object");
    });
  });
|
||||
|
||||
describe("loadSchedules", () => {
|
||||
test("should return empty array when schedules file does not exist", async () => {
|
||||
const result = await scheduleService.loadSchedules();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
test("should load schedules from JSON file and convert date strings to Date objects", async () => {
|
||||
const mockScheduleData = [
|
||||
{
|
||||
id: "schedule_1",
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-12-01T10:00:00.000Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: { targetTag: "sale" },
|
||||
status: "pending",
|
||||
createdAt: "2024-11-01T10:00:00.000Z",
|
||||
lastExecuted: null,
|
||||
nextExecution: null,
|
||||
},
|
||||
];
|
||||
|
||||
// Write test data to file
|
||||
await fs.writeFile(
|
||||
scheduleService.schedulesFile,
|
||||
JSON.stringify(mockScheduleData),
|
||||
"utf8"
|
||||
);
|
||||
|
||||
const result = await scheduleService.loadSchedules();
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].scheduledTime).toBeInstanceOf(Date);
|
||||
expect(result[0].createdAt).toBeInstanceOf(Date);
|
||||
expect(result[0].lastExecuted).toBeNull();
|
||||
expect(result[0].nextExecution).toBeNull();
|
||||
});
|
||||
|
||||
test("should throw error for invalid JSON", async () => {
|
||||
// Write invalid JSON to file
|
||||
await fs.writeFile(scheduleService.schedulesFile, "invalid json", "utf8");
|
||||
|
||||
await expect(scheduleService.loadSchedules()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("saveSchedules", () => {
|
||||
test("should save schedules to JSON file with date objects converted to ISO strings", async () => {
|
||||
const schedules = [
|
||||
{
|
||||
id: "schedule_1",
|
||||
operationType: "update",
|
||||
scheduledTime: new Date("2024-12-01T10:00:00.000Z"),
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: { targetTag: "sale" },
|
||||
status: "pending",
|
||||
createdAt: new Date("2024-11-01T10:00:00.000Z"),
|
||||
lastExecuted: null,
|
||||
nextExecution: null,
|
||||
},
|
||||
];
|
||||
|
||||
await scheduleService.saveSchedules(schedules);
|
||||
|
||||
// Read the file back and verify content
|
||||
const fileContent = await fs.readFile(
|
||||
scheduleService.schedulesFile,
|
||||
"utf8"
|
||||
);
|
||||
const savedData = JSON.parse(fileContent);
|
||||
|
||||
expect(savedData).toHaveLength(1);
|
||||
expect(savedData[0].scheduledTime).toBe("2024-12-01T10:00:00.000Z");
|
||||
expect(savedData[0].createdAt).toBe("2024-11-01T10:00:00.000Z");
|
||||
});
|
||||
});
|
||||
|
||||
describe("addSchedule", () => {
|
||||
test("should add a valid schedule with generated ID and defaults", async () => {
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000), // Tomorrow
|
||||
recurrence: "daily",
|
||||
config: { targetTag: "sale" },
|
||||
};
|
||||
|
||||
const result = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
expect(result.id).toMatch(/^schedule_\d+_[a-z0-9]+$/);
|
||||
expect(result.operationType).toBe("update");
|
||||
expect(result.scheduledTime).toBeInstanceOf(Date);
|
||||
expect(result.recurrence).toBe("daily");
|
||||
expect(result.enabled).toBe(true);
|
||||
expect(result.status).toBe("pending");
|
||||
expect(result.createdAt).toBeInstanceOf(Date);
|
||||
expect(result.nextExecution).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
test("should apply default values for optional fields", async () => {
|
||||
const newSchedule = {
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
};
|
||||
|
||||
const result = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
expect(result.recurrence).toBe("once");
|
||||
expect(result.enabled).toBe(true);
|
||||
expect(result.config).toEqual({});
|
||||
expect(result.nextExecution).toBeNull();
|
||||
});
|
||||
|
||||
test("should throw error for invalid schedule", async () => {
|
||||
const invalidSchedule = {
|
||||
operationType: "invalid",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
};
|
||||
|
||||
await expect(
|
||||
scheduleService.addSchedule(invalidSchedule)
|
||||
).rejects.toThrow(
|
||||
'Invalid schedule: Operation type must be "update" or "rollback"'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateSchedule", () => {
|
||||
test("should update existing schedule", async () => {
|
||||
// First add a schedule
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
recurrence: "once",
|
||||
config: { targetTag: "sale" },
|
||||
};
|
||||
|
||||
const addedSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
// Then update it
|
||||
const updates = {
|
||||
enabled: false,
|
||||
recurrence: "weekly",
|
||||
};
|
||||
|
||||
const result = await scheduleService.updateSchedule(
|
||||
addedSchedule.id,
|
||||
updates
|
||||
);
|
||||
|
||||
expect(result.enabled).toBe(false);
|
||||
expect(result.recurrence).toBe("weekly");
|
||||
expect(result.id).toBe(addedSchedule.id);
|
||||
});
|
||||
|
||||
test("should recalculate nextExecution when scheduledTime is updated", async () => {
|
||||
// First add a schedule
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
recurrence: "daily",
|
||||
};
|
||||
|
||||
const addedSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
// Update with new scheduled time
|
||||
const newScheduledTime = new Date(Date.now() + 172800000); // 2 days from now
|
||||
const updates = {
|
||||
scheduledTime: newScheduledTime,
|
||||
recurrence: "daily",
|
||||
};
|
||||
|
||||
const result = await scheduleService.updateSchedule(
|
||||
addedSchedule.id,
|
||||
updates
|
||||
);
|
||||
|
||||
expect(result.scheduledTime).toEqual(newScheduledTime);
|
||||
expect(result.nextExecution).toBeInstanceOf(Date);
|
||||
expect(result.nextExecution.getTime()).toBeGreaterThan(
|
||||
newScheduledTime.getTime()
|
||||
);
|
||||
});
|
||||
|
||||
test("should throw error for non-existent schedule", async () => {
|
||||
await expect(
|
||||
scheduleService.updateSchedule("non_existent", { enabled: false })
|
||||
).rejects.toThrow("Schedule with ID non_existent not found");
|
||||
});
|
||||
|
||||
test("should throw error for invalid updates", async () => {
|
||||
// First add a schedule
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
};
|
||||
|
||||
const addedSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
const invalidUpdates = {
|
||||
operationType: "invalid",
|
||||
};
|
||||
|
||||
await expect(
|
||||
scheduleService.updateSchedule(addedSchedule.id, invalidUpdates)
|
||||
).rejects.toThrow("Invalid schedule update");
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteSchedule", () => {
|
||||
test("should delete existing schedule and return true", async () => {
|
||||
// First add a schedule
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
config: { targetTag: "sale" },
|
||||
};
|
||||
|
||||
const addedSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
|
||||
// Then delete it
|
||||
const result = await scheduleService.deleteSchedule(addedSchedule.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
// Verify it's gone
|
||||
const schedules = await scheduleService.loadSchedules();
|
||||
expect(schedules).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("should return false for non-existent schedule", async () => {
|
||||
const result = await scheduleService.deleteSchedule("non_existent");
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("helper methods", () => {
|
||||
test("should get schedules by status", async () => {
|
||||
// Add schedules with different statuses
|
||||
const schedule1 = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
});
|
||||
|
||||
await scheduleService.updateSchedule(schedule1.id, {
|
||||
status: "completed",
|
||||
});
|
||||
|
||||
const schedule2 = await scheduleService.addSchedule({
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 172800000),
|
||||
});
|
||||
|
||||
const pendingSchedules = await scheduleService.getSchedulesByStatus(
|
||||
"pending"
|
||||
);
|
||||
const completedSchedules = await scheduleService.getSchedulesByStatus(
|
||||
"completed"
|
||||
);
|
||||
|
||||
expect(pendingSchedules).toHaveLength(1);
|
||||
expect(completedSchedules).toHaveLength(1);
|
||||
expect(pendingSchedules[0].id).toBe(schedule2.id);
|
||||
expect(completedSchedules[0].id).toBe(schedule1.id);
|
||||
});
|
||||
|
||||
test("should get schedules by operation type", async () => {
|
||||
const schedule1 = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
});
|
||||
|
||||
const schedule2 = await scheduleService.addSchedule({
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 172800000),
|
||||
});
|
||||
|
||||
const schedule3 = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 259200000),
|
||||
});
|
||||
|
||||
const updateSchedules = await scheduleService.getSchedulesByOperationType(
|
||||
"update"
|
||||
);
|
||||
const rollbackSchedules =
|
||||
await scheduleService.getSchedulesByOperationType("rollback");
|
||||
|
||||
expect(updateSchedules).toHaveLength(2);
|
||||
expect(rollbackSchedules).toHaveLength(1);
|
||||
expect(rollbackSchedules[0].id).toBe(schedule2.id);
|
||||
});
|
||||
|
||||
test("should get enabled schedules", async () => {
|
||||
const schedule1 = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const schedule2 = await scheduleService.addSchedule({
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 172800000),
|
||||
enabled: false,
|
||||
});
|
||||
|
||||
const schedule3 = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 259200000),
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const enabledSchedules = await scheduleService.getEnabledSchedules();
|
||||
|
||||
expect(enabledSchedules).toHaveLength(2);
|
||||
expect(enabledSchedules.every((s) => s.enabled === true)).toBe(true);
|
||||
});
|
||||
|
||||
test("should mark schedule as completed", async () => {
|
||||
const schedule = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
});
|
||||
|
||||
const result = await scheduleService.markScheduleCompleted(schedule.id);
|
||||
|
||||
expect(result.status).toBe("completed");
|
||||
expect(result.lastExecuted).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
test("should mark schedule as failed", async () => {
|
||||
const schedule = await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000),
|
||||
});
|
||||
|
||||
const result = await scheduleService.markScheduleFailed(
|
||||
schedule.id,
|
||||
"Test error"
|
||||
);
|
||||
|
||||
expect(result.status).toBe("failed");
|
||||
expect(result.lastExecuted).toBeInstanceOf(Date);
|
||||
expect(result.errorMessage).toBe("Test error");
|
||||
});
|
||||
});
|
||||
|
||||
describe("private methods", () => {
|
||||
test("should generate unique IDs", () => {
|
||||
const existingSchedules = [
|
||||
{ id: "schedule_123_abc" },
|
||||
{ id: "schedule_456_def" },
|
||||
];
|
||||
|
||||
const id1 = scheduleService._generateId(existingSchedules);
|
||||
const id2 = scheduleService._generateId(existingSchedules);
|
||||
|
||||
expect(id1).toMatch(/^schedule_\d+_[a-z0-9]+$/);
|
||||
expect(id2).toMatch(/^schedule_\d+_[a-z0-9]+$/);
|
||||
expect(id1).not.toBe(id2);
|
||||
expect(existingSchedules.some((s) => s.id === id1)).toBe(false);
|
||||
expect(existingSchedules.some((s) => s.id === id2)).toBe(false);
|
||||
});
|
||||
|
||||
test("should calculate next execution for daily recurrence", () => {
|
||||
const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
|
||||
const nextExecution = scheduleService._calculateNextExecution(
|
||||
scheduledTime,
|
||||
"daily"
|
||||
);
|
||||
|
||||
expect(nextExecution).toBeInstanceOf(Date);
|
||||
expect(nextExecution.getDate()).toBe(scheduledTime.getDate() + 1);
|
||||
});
|
||||
|
||||
test("should calculate next execution for weekly recurrence", () => {
|
||||
const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
|
||||
const nextExecution = scheduleService._calculateNextExecution(
|
||||
scheduledTime,
|
||||
"weekly"
|
||||
);
|
||||
|
||||
expect(nextExecution).toBeInstanceOf(Date);
|
||||
expect(nextExecution.getDate()).toBe(scheduledTime.getDate() + 7);
|
||||
});
|
||||
|
||||
test("should calculate next execution for monthly recurrence", () => {
|
||||
const scheduledTime = new Date("2024-11-01T10:00:00.000Z"); // November instead of December
|
||||
const nextExecution = scheduleService._calculateNextExecution(
|
||||
scheduledTime,
|
||||
"monthly"
|
||||
);
|
||||
|
||||
expect(nextExecution).toBeInstanceOf(Date);
|
||||
expect(nextExecution.getMonth()).toBe(scheduledTime.getMonth() + 1);
|
||||
});
|
||||
|
||||
test("should return null for once recurrence", () => {
|
||||
const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
|
||||
const nextExecution = scheduleService._calculateNextExecution(
|
||||
scheduledTime,
|
||||
"once"
|
||||
);
|
||||
|
||||
expect(nextExecution).toBeNull();
|
||||
});
|
||||
|
||||
test("should return null for invalid recurrence", () => {
|
||||
const scheduledTime = new Date("2024-12-01T10:00:00.000Z");
|
||||
const nextExecution = scheduleService._calculateNextExecution(
|
||||
scheduledTime,
|
||||
"invalid"
|
||||
);
|
||||
|
||||
expect(nextExecution).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
328
tests/services/tagAnalysis.test.js
Normal file
328
tests/services/tagAnalysis.test.js
Normal file
@@ -0,0 +1,328 @@
|
||||
const TagAnalysisService = require("../../src/services/tagAnalysis");
|
||||
const ProductService = require("../../src/services/product");
|
||||
const ProgressService = require("../../src/services/progress");
|
||||
|
||||
// Mock the dependencies
|
||||
jest.mock("../../src/services/product");
|
||||
jest.mock("../../src/services/progress");
|
||||
|
||||
describe("TagAnalysisService", () => {
|
||||
let tagAnalysisService;
|
||||
let mockProductService;
|
||||
let mockProgressService;
|
||||
|
||||
const mockProducts = [
|
||||
{
|
||||
id: "product1",
|
||||
title: "Test Product 1",
|
||||
tags: ["sale", "featured", "new"],
|
||||
variants: [
|
||||
{ id: "variant1", price: "29.99" },
|
||||
{ id: "variant2", price: "39.99" },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "product2",
|
||||
title: "Test Product 2",
|
||||
tags: ["sale", "clearance"],
|
||||
variants: [{ id: "variant3", price: "19.99" }],
|
||||
},
|
||||
{
|
||||
id: "product3",
|
||||
title: "Test Product 3",
|
||||
tags: ["featured", "premium"],
|
||||
variants: [
|
||||
{ id: "variant4", price: "99.99" },
|
||||
{ id: "variant5", price: "149.99" },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "product4",
|
||||
title: "Test Product 4",
|
||||
tags: ["new"],
|
||||
variants: [{ id: "variant6", price: "49.99" }],
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
mockProductService = {
|
||||
debugFetchAllProductTags: jest.fn(),
|
||||
fetchProductsByTag: jest.fn(),
|
||||
};
|
||||
|
||||
mockProgressService = {
|
||||
info: jest.fn().mockResolvedValue(),
|
||||
error: jest.fn().mockResolvedValue(),
|
||||
};
|
||||
|
||||
ProductService.mockImplementation(() => mockProductService);
|
||||
ProgressService.mockImplementation(() => mockProgressService);
|
||||
|
||||
tagAnalysisService = new TagAnalysisService();
|
||||
});
|
||||
|
||||
describe("getTagAnalysis", () => {
|
||||
test("successfully analyzes product tags", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis(250);
|
||||
|
||||
expect(result).toHaveProperty("totalProducts", 4);
|
||||
expect(result).toHaveProperty("tagCounts");
|
||||
expect(result).toHaveProperty("priceRanges");
|
||||
expect(result).toHaveProperty("recommendations");
|
||||
expect(result).toHaveProperty("analyzedAt");
|
||||
|
||||
// Verify tag counts are sorted by count (descending)
|
||||
expect(result.tagCounts[0].tag).toBe("sale"); // appears in 2 products
|
||||
expect(result.tagCounts[0].count).toBe(2);
|
||||
expect(result.tagCounts[0].percentage).toBe(50.0);
|
||||
|
||||
expect(result.tagCounts[1].tag).toBe("featured"); // appears in 2 products
|
||||
expect(result.tagCounts[1].count).toBe(2);
|
||||
expect(result.tagCounts[1].percentage).toBe(50.0);
|
||||
});
|
||||
|
||||
test("calculates price ranges correctly", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
// Check sale tag price range (products 1 and 2)
|
||||
const salePriceRange = result.priceRanges["sale"];
|
||||
expect(salePriceRange).toBeDefined();
|
||||
expect(salePriceRange.min).toBe(19.99);
|
||||
expect(salePriceRange.max).toBe(39.99);
|
||||
expect(salePriceRange.count).toBe(3); // 2 variants from product1 + 1 from product2
|
||||
expect(salePriceRange.average).toBeCloseTo(29.99, 2); // (29.99 + 39.99 + 19.99) / 3
|
||||
|
||||
// Check featured tag price range (products 1 and 3)
|
||||
const featuredPriceRange = result.priceRanges["featured"];
|
||||
expect(featuredPriceRange).toBeDefined();
|
||||
expect(featuredPriceRange.min).toBe(29.99);
|
||||
expect(featuredPriceRange.max).toBe(149.99);
|
||||
expect(featuredPriceRange.count).toBe(4); // 2 from product1 + 2 from product3
|
||||
});
|
||||
|
||||
test("generates appropriate recommendations", async () => {
|
||||
// Create more products to meet the minimum count requirement for caution tags
|
||||
const moreProducts = [
|
||||
...mockProducts,
|
||||
{
|
||||
id: "product5",
|
||||
title: "Product 5",
|
||||
tags: ["sale"],
|
||||
variants: [{ id: "v5", price: "25.99" }],
|
||||
},
|
||||
{
|
||||
id: "product6",
|
||||
title: "Product 6",
|
||||
tags: ["clearance"],
|
||||
variants: [{ id: "v6", price: "15.99" }],
|
||||
},
|
||||
{
|
||||
id: "product7",
|
||||
title: "Product 7",
|
||||
tags: ["clearance"],
|
||||
variants: [{ id: "v7", price: "12.99" }],
|
||||
},
|
||||
];
|
||||
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
moreProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
expect(result.recommendations).toBeInstanceOf(Array);
|
||||
expect(result.recommendations.length).toBeGreaterThan(0);
|
||||
|
||||
// Should have caution recommendation for 'sale' and 'clearance' tags
|
||||
const cautionRec = result.recommendations.find(
|
||||
(rec) => rec.type === "caution"
|
||||
);
|
||||
expect(cautionRec).toBeDefined();
|
||||
expect(cautionRec.tags).toContain("sale");
|
||||
expect(cautionRec.tags).toContain("clearance");
|
||||
});
|
||||
|
||||
test("handles empty product list", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue([]);
|
||||
|
||||
await expect(tagAnalysisService.getTagAnalysis()).rejects.toThrow(
|
||||
"No products found for tag analysis"
|
||||
);
|
||||
expect(mockProgressService.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining("Tag analysis failed")
|
||||
);
|
||||
});
|
||||
|
||||
test("handles products without tags", async () => {
|
||||
const productsWithoutTags = [
|
||||
{ id: "product1", title: "Product 1", tags: null, variants: [] },
|
||||
{ id: "product2", title: "Product 2", tags: [], variants: [] },
|
||||
{ id: "product3", title: "Product 3", variants: [] }, // no tags property
|
||||
];
|
||||
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
productsWithoutTags
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
expect(result.totalProducts).toBe(3);
|
||||
expect(result.tagCounts).toHaveLength(0);
|
||||
expect(Object.keys(result.priceRanges)).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("caches results for performance", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
// First call
|
||||
const result1 = await tagAnalysisService.getTagAnalysis(250);
|
||||
|
||||
// Second call should use cache
|
||||
const result2 = await tagAnalysisService.getTagAnalysis(250);
|
||||
|
||||
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
|
||||
1
|
||||
);
|
||||
expect(result1).toEqual(result2);
|
||||
});
|
||||
|
||||
test("respects cache expiry", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
// Mock Date.now to control cache expiry
|
||||
const originalDateNow = Date.now;
|
||||
let mockTime = 1000000;
|
||||
Date.now = jest.fn(() => mockTime);
|
||||
|
||||
// First call
|
||||
await tagAnalysisService.getTagAnalysis(250);
|
||||
|
||||
// Advance time beyond cache expiry (5 minutes)
|
||||
mockTime += 6 * 60 * 1000;
|
||||
|
||||
// Second call should fetch fresh data
|
||||
await tagAnalysisService.getTagAnalysis(250);
|
||||
|
||||
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
|
||||
2
|
||||
);
|
||||
|
||||
// Restore original Date.now
|
||||
Date.now = originalDateNow;
|
||||
});
|
||||
});
|
||||
|
||||
describe("Requirements Compliance", () => {
|
||||
test("meets requirement 7.1 - analyzes available product tags and counts", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
// Should provide tag counts
|
||||
expect(result.tagCounts).toBeInstanceOf(Array);
|
||||
expect(result.tagCounts.length).toBeGreaterThan(0);
|
||||
|
||||
result.tagCounts.forEach((tagInfo) => {
|
||||
expect(tagInfo).toHaveProperty("tag");
|
||||
expect(tagInfo).toHaveProperty("count");
|
||||
expect(typeof tagInfo.count).toBe("number");
|
||||
expect(tagInfo.count).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test("meets requirement 7.2 - shows sample products for selected tags", async () => {
|
||||
const mockSampleProducts = [
|
||||
{
|
||||
id: "product1",
|
||||
title: "Test Product 1",
|
||||
tags: ["sale", "featured"],
|
||||
variants: [
|
||||
{
|
||||
id: "variant1",
|
||||
title: "Default",
|
||||
price: "29.99",
|
||||
compareAtPrice: "39.99",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
mockProductService.fetchProductsByTag.mockResolvedValue(
|
||||
mockSampleProducts
|
||||
);
|
||||
|
||||
const samples = await tagAnalysisService.getSampleProductsForTag("sale");
|
||||
|
||||
// Should return sample products with essential info
|
||||
expect(samples).toBeInstanceOf(Array);
|
||||
samples.forEach((product) => {
|
||||
expect(product).toHaveProperty("id");
|
||||
expect(product).toHaveProperty("title");
|
||||
expect(product).toHaveProperty("tags");
|
||||
expect(product).toHaveProperty("variants");
|
||||
});
|
||||
});
|
||||
|
||||
test("meets requirement 7.3 - provides comprehensive tag analysis", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
// Should provide comprehensive analysis
|
||||
expect(result).toHaveProperty("totalProducts");
|
||||
expect(result).toHaveProperty("tagCounts");
|
||||
expect(result).toHaveProperty("priceRanges");
|
||||
expect(result).toHaveProperty("recommendations");
|
||||
expect(result).toHaveProperty("analyzedAt");
|
||||
|
||||
// Tag counts should be sorted and include percentages
|
||||
expect(result.tagCounts[0].count).toBeGreaterThanOrEqual(
|
||||
result.tagCounts[1]?.count || 0
|
||||
);
|
||||
result.tagCounts.forEach((tag) => {
|
||||
expect(tag).toHaveProperty("percentage");
|
||||
expect(typeof tag.percentage).toBe("number");
|
||||
});
|
||||
});
|
||||
|
||||
test("meets requirement 7.4 - provides tag recommendations", async () => {
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await tagAnalysisService.getTagAnalysis();
|
||||
|
||||
// Should provide recommendations
|
||||
expect(result.recommendations).toBeInstanceOf(Array);
|
||||
result.recommendations.forEach((rec) => {
|
||||
expect(rec).toHaveProperty("type");
|
||||
expect(rec).toHaveProperty("title");
|
||||
expect(rec).toHaveProperty("description");
|
||||
expect(rec).toHaveProperty("tags");
|
||||
expect(rec).toHaveProperty("reason");
|
||||
expect(rec).toHaveProperty("priority");
|
||||
expect(rec).toHaveProperty("actionable");
|
||||
expect(rec).toHaveProperty("estimatedImpact");
|
||||
expect(Array.isArray(rec.tags)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user