TUI is a doomed path. Stick with CLI.

2025-08-17 01:04:10 -05:00
parent f38eff12cd
commit 35960388cf
23 changed files with 6616 additions and 0 deletions


@@ -0,0 +1,259 @@
const LogService = require("../../../src/tui/services/LogService.js");
describe("LogService Performance Optimizations", () => {
let service;
beforeEach(() => {
service = new LogService("test-progress.md");
jest.clearAllMocks();
});
afterEach(() => {
if (service) {
service.destroy();
}
});
describe("efficient pagination", () => {
it("should paginate logs efficiently", () => {
const logs = Array.from({ length: 100 }, (_, i) => ({
id: `log_${i}`,
timestamp: new Date(),
title: `Log Entry ${i}`,
message: `Message ${i}`,
level: "INFO",
}));
const result = service.paginateLogs(logs, 2, 10); // zero-based page 2, 10 items per page (entries 21-30)
expect(result.entries).toHaveLength(10);
expect(result.pagination.currentPage).toBe(2);
expect(result.pagination.totalPages).toBe(10);
expect(result.pagination.hasNextPage).toBe(true);
expect(result.pagination.hasPreviousPage).toBe(true);
expect(result.pagination.startIndex).toBe(21); // 1-based index
expect(result.pagination.endIndex).toBe(30);
});
it("should handle edge cases in pagination", () => {
const logs = Array.from({ length: 5 }, (_, i) => ({
id: `log_${i}`,
timestamp: new Date(),
title: `Log Entry ${i}`,
message: `Message ${i}`,
level: "INFO",
}));
// Single page: all five entries fit on page 0
const result = service.paginateLogs(logs, 0, 10);
expect(result.entries).toHaveLength(5);
expect(result.pagination.totalPages).toBe(1);
expect(result.pagination.hasNextPage).toBe(false);
expect(result.pagination.hasPreviousPage).toBe(false);
});
});
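// A minimal sketch of the paginateLogs contract the assertions above pin
// down -- zero-based `page` in, 1-based startIndex/endIndex out. This is a
// hypothetical reference, not the LogService implementation from this diff.
function paginateLogsSketch(logs, page, pageSize) {
  const totalPages = Math.max(1, Math.ceil(logs.length / pageSize));
  const currentPage = Math.min(page, totalPages - 1);
  const start = currentPage * pageSize;
  const entries = logs.slice(start, start + pageSize);
  return {
    entries,
    pagination: {
      currentPage,
      totalPages,
      hasNextPage: currentPage < totalPages - 1,
      hasPreviousPage: currentPage > 0,
      startIndex: entries.length === 0 ? 0 : start + 1, // 1-based
      endIndex: start + entries.length,
    },
  };
}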
describe("streaming for large files", () => {
it("should parse log content in streaming mode", async () => {
const mockContent = "Test log content";
const result = await service.parseLogContentStreaming(
mockContent,
{
dateRange: "all",
operationType: "all",
status: "all",
searchTerm: "",
},
0,
10
);
expect(result).toHaveProperty("entries");
expect(result).toHaveProperty("totalCount");
expect(Array.isArray(result.entries)).toBe(true);
});
});
describe("caching optimizations", () => {
it("should track cache statistics", () => {
const stats = service.getCacheStats();
expect(stats).toHaveProperty("size");
expect(stats).toHaveProperty("keys");
expect(typeof stats.size).toBe("number");
expect(Array.isArray(stats.keys)).toBe(true);
});
it("should provide memory usage statistics", () => {
const stats = service.getMemoryStats();
expect(stats).toHaveProperty("cacheEntries");
expect(stats).toHaveProperty("estimatedSizeBytes");
expect(stats).toHaveProperty("estimatedSizeMB");
expect(stats).toHaveProperty("maxEntries");
expect(stats).toHaveProperty("cacheHitRatio");
});
});
describe("memory management", () => {
it("should clean up expired cache entries", () => {
// Add some cache entries with old timestamps
service.cache.set("old_entry", {
data: { test: "data" },
timestamp: Date.now() - 10 * 60 * 1000, // 10 minutes ago
});
service.cache.set("new_entry", {
data: { test: "data" },
timestamp: Date.now(),
});
expect(service.cache.size).toBe(2);
service.cleanup();
expect(service.cache.size).toBe(1);
expect(service.cache.has("new_entry")).toBe(true);
expect(service.cache.has("old_entry")).toBe(false);
});
it("should limit cache size to prevent memory issues", () => {
// Fill cache beyond limit
for (let i = 0; i < 40; i++) {
service.cache.set(`entry_${i}`, {
data: { large: "data".repeat(1000) },
timestamp: Date.now() - i * 1000, // Different timestamps
});
}
expect(service.cache.size).toBeGreaterThan(30);
service.cleanup();
expect(service.cache.size).toBeLessThanOrEqual(30);
});
it("should clean up resources on destroy", () => {
service.destroy();
expect(service.cache.size).toBe(0);
expect(service.cleanupInterval).toBeNull();
});
});
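// Taken together, the three tests above imply a two-stage eviction policy:
// expire by TTL first, then trim oldest-first down to a hard cap. A sketch;
// the TTL is an assumption (the tests only require it to fall between the
// fresh entry and the 10-minute-old one), while the 30-entry cap is asserted.
const CACHE_TTL_MS = 5 * 60 * 1000; // assumed
const MAX_CACHE_ENTRIES = 30;
function cleanupCacheSketch(cache) {
  const now = Date.now();
  for (const [key, entry] of cache) {
    if (now - entry.timestamp > CACHE_TTL_MS) cache.delete(key); // expired
  }
  if (cache.size > MAX_CACHE_ENTRIES) {
    [...cache.entries()]
      .sort((a, b) => a[1].timestamp - b[1].timestamp) // oldest first
      .slice(0, cache.size - MAX_CACHE_ENTRIES)
      .forEach(([key]) => cache.delete(key));
  }
}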
describe("filtering optimizations", () => {
it("should filter logs efficiently", () => {
const logs = [
{
id: "log_1",
timestamp: new Date("2024-01-01"),
title: "Update Product A",
message: "Product updated successfully",
level: "SUCCESS",
type: "update",
details: "Product A details",
productTitle: "Product A",
},
{
id: "log_2",
timestamp: new Date("2024-01-02"),
title: "Error Product B",
message: "Product update failed",
level: "ERROR",
type: "update",
details: "Product B error details",
productTitle: "Product B",
},
{
id: "log_3",
timestamp: new Date("2024-01-03"),
title: "Rollback Product C",
message: "Product rollback completed",
level: "INFO",
type: "rollback",
details: "Product C rollback details",
productTitle: "Product C",
},
];
// Filter by operation type
const updateLogs = service.filterLogs(logs, { operationType: "update" });
expect(updateLogs).toHaveLength(2);
// Filter by status
const errorLogs = service.filterLogs(logs, { status: "error" });
expect(errorLogs).toHaveLength(1);
expect(errorLogs[0].level).toBe("ERROR");
// Filter by search term
const productALogs = service.filterLogs(logs, {
searchTerm: "Product A",
});
expect(productALogs).toHaveLength(1);
expect(productALogs[0].productTitle).toBe("Product A");
});
it("should handle date range filtering", () => {
const now = new Date();
const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
const lastWeek = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
const logs = [
{
id: "log_1",
timestamp: now,
title: "Recent Log",
message: "Recent message",
level: "INFO",
},
{
id: "log_2",
timestamp: yesterday,
title: "Yesterday Log",
message: "Yesterday message",
level: "INFO",
},
{
id: "log_3",
timestamp: lastWeek,
title: "Old Log",
message: "Old message",
level: "INFO",
},
];
// Filter by today
const todayLogs = service.filterLogs(logs, { dateRange: "today" });
expect(todayLogs).toHaveLength(1);
expect(todayLogs[0].title).toBe("Recent Log");
// Filter by week
const weekLogs = service.filterLogs(logs, { dateRange: "week" });
expect(weekLogs.length).toBeGreaterThanOrEqual(2); // Should include recent and yesterday
});
});
describe("preloading", () => {
it("should preload next page without blocking", async () => {
const options = {
page: 0,
pageSize: 10,
dateRange: "all",
operationType: "all",
status: "all",
searchTerm: "",
};
// Mock the getFilteredLogs method to avoid actual file operations
service.getFilteredLogs = jest.fn().mockResolvedValue({
entries: [],
pagination: { hasNextPage: true },
});
// Preload should not throw errors
await expect(service.preloadNextPage(options)).resolves.toBeUndefined();
});
});
});
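The preload test above only pins down that preloadNextPage resolves to undefined and never rejects. A best-effort shape that satisfies it, assuming a hypothetical cache-warming second call (getFilteredLogs is the only method the test confirms exists):

async function preloadNextPageSketch(service, options) {
  try {
    const { pagination } = await service.getFilteredLogs(options);
    if (pagination.hasNextPage) {
      // Warm the cache for the following page; the result is discarded.
      service
        .getFilteredLogs({ ...options, page: options.page + 1 })
        .catch(() => {});
    }
  } catch (error) {
    // Best-effort: preloading must never surface errors to the caller.
  }
}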


@@ -0,0 +1,79 @@
/**
* Basic ScheduleService Tests
* Tests for core functionality
*/
const fs = require("fs");
const ScheduleService = require("../../../src/tui/services/ScheduleService");
describe("ScheduleService Basic Tests", () => {
let scheduleService;
const testSchedulesFile = "test-schedules-basic.json";
beforeEach(() => {
scheduleService = new ScheduleService();
scheduleService.schedulesFile = testSchedulesFile;
scheduleService.lockFile = `${testSchedulesFile}.lock`;
// Clean up any existing test files
try {
fs.unlinkSync(testSchedulesFile);
} catch (error) {
// File doesn't exist, which is fine
}
});
afterEach(() => {
// Remove test files
try {
fs.unlinkSync(testSchedulesFile);
} catch (error) {
// File doesn't exist, which is fine
}
});
test("should validate schedule data", () => {
const validSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
description: "Test schedule",
};
expect(() =>
scheduleService.validateScheduleData(validSchedule)
).not.toThrow();
});
test("should reject invalid operation types", () => {
const invalidSchedule = {
operationType: "invalid",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
};
expect(() =>
scheduleService.validateScheduleData(invalidSchedule)
).toThrow();
});
test("should calculate checksum correctly", () => {
const data = [{ id: "1", name: "test" }];
const checksum1 = scheduleService.calculateChecksum(data);
const checksum2 = scheduleService.calculateChecksum(data);
expect(checksum1).toBe(checksum2);
expect(typeof checksum1).toBe("string");
expect(checksum1.length).toBe(32); // MD5 hash length
});
test("should provide service statistics", () => {
const stats = scheduleService.getServiceStats();
expect(stats).toHaveProperty("schedulesLoaded");
expect(stats).toHaveProperty("schedulesCount");
expect(stats).toHaveProperty("activeSchedules");
expect(stats).toHaveProperty("pendingOperations");
expect(stats).toHaveProperty("memoryUsage");
});
});
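The checksum test fixes three properties: determinism, a string result, and a 32-character length, which matches an MD5 hex digest. A minimal sketch under that assumption (the real hashing strategy is not shown in this diff):

const crypto = require("crypto");

function calculateChecksumSketch(data) {
  // Deterministic as long as the serialized key order is stable.
  return crypto.createHash("md5").update(JSON.stringify(data)).digest("hex");
}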


@@ -0,0 +1,374 @@
/**
* Enhanced ScheduleService Tests
* Tests for data persistence, state management, and concurrent access
* Requirements: 5.1, 5.4, 5.6
*/
const fs = require("fs");
const path = require("path");
const ScheduleService = require("../../../src/tui/services/ScheduleService");
describe("ScheduleService Enhanced Features", () => {
let scheduleService;
const testSchedulesFile = "test-schedules.json";
const testLockFile = "test-schedules.json.lock";
beforeEach(() => {
// Create service with test file
scheduleService = new ScheduleService();
scheduleService.schedulesFile = testSchedulesFile;
scheduleService.lockFile = testLockFile;
// Clean up any existing test files
[
testSchedulesFile,
testLockFile,
`${testSchedulesFile}.backup`,
`${testSchedulesFile}.tmp.${Date.now()}`,
].forEach((file) => {
try {
fs.unlinkSync(file);
} catch (error) {
// File doesn't exist, which is fine
}
});
});
afterEach(async () => {
// Cleanup
await scheduleService.cleanup();
// Remove test files
[testSchedulesFile, testLockFile, `${testSchedulesFile}.backup`].forEach(
(file) => {
try {
fs.unlinkSync(file);
} catch (error) {
// File doesn't exist, which is fine
}
}
);
});
describe("Data Persistence", () => {
test("should save schedules with metadata and checksum", async () => {
const testSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(), // Tomorrow
recurrence: "once",
description: "Test schedule",
enabled: true,
};
const savedSchedule = await scheduleService.addSchedule(testSchedule);
expect(savedSchedule.id).toBeDefined();
// Check file structure
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
const parsedData = JSON.parse(fileContent);
expect(parsedData.version).toBe("1.0");
expect(parsedData.lastModified).toBeDefined();
expect(parsedData.schedules).toHaveLength(1);
expect(parsedData.metadata.totalSchedules).toBe(1);
expect(parsedData.metadata.checksum).toBeDefined();
});
test("should create backup before saving", async () => {
// Create initial schedule
const schedule1 = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
description: "First schedule",
};
await scheduleService.addSchedule(schedule1);
// Add another schedule (should create backup)
const schedule2 = {
operationType: "rollback",
scheduledTime: new Date(Date.now() + 172800000).toISOString(),
recurrence: "once",
description: "Second schedule",
};
await scheduleService.addSchedule(schedule2);
// Check that backup exists
expect(fs.existsSync(`${testSchedulesFile}.backup`)).toBe(true);
});
test("should verify data integrity with checksum", async () => {
const testSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "daily",
description: "Integrity test",
};
await scheduleService.addSchedule(testSchedule);
// Manually corrupt the file
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
const parsedData = JSON.parse(fileContent);
// Change checksum to simulate corruption
parsedData.metadata.checksum = "invalid-checksum";
fs.writeFileSync(testSchedulesFile, JSON.stringify(parsedData, null, 2));
// Loading should detect corruption
const newService = new ScheduleService();
newService.schedulesFile = testSchedulesFile;
await expect(newService.loadSchedules()).rejects.toThrow();
});
});
describe("File Locking", () => {
test("should acquire and release file locks", async () => {
await scheduleService.acquireFileLock();
expect(fs.existsSync(testLockFile)).toBe(true);
await scheduleService.releaseFileLock();
expect(fs.existsSync(testLockFile)).toBe(false);
});
test("should handle concurrent access attempts", async () => {
// Simulate concurrent access
const service1 = new ScheduleService();
const service2 = new ScheduleService();
service1.schedulesFile = testSchedulesFile;
service1.lockFile = testLockFile;
service2.schedulesFile = testSchedulesFile;
service2.lockFile = testLockFile;
// First service acquires lock
await service1.acquireFileLock();
// Second service should fail to acquire lock
await expect(service2.acquireFileLock()).rejects.toThrow(
/Failed to acquire file lock/
);
// Release first lock
await service1.releaseFileLock();
// Now second service should be able to acquire lock
await expect(service2.acquireFileLock()).resolves.not.toThrow();
await service2.releaseFileLock();
});
test("should handle stale lock files", async () => {
// Create a stale lock file
const staleLockData = {
pid: 99999,
timestamp: new Date(Date.now() - 10000).toISOString(), // 10 seconds ago
operation: "test",
};
fs.writeFileSync(testLockFile, JSON.stringify(staleLockData));
// Should be able to acquire lock by removing stale lock
await expect(scheduleService.acquireFileLock()).resolves.not.toThrow();
await scheduleService.releaseFileLock();
});
});
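// The three locking tests above pin down exclusive creation, a descriptive
// failure message, and stale-lock takeover. One plausible shape; the
// staleness threshold is an assumption (the tests only show a 10-second-old
// lock being treated as stale).
const STALE_LOCK_MS = 5000; // assumed
function acquireFileLockSketch(lockFile) {
  const payload = JSON.stringify({
    pid: process.pid,
    timestamp: new Date().toISOString(),
  });
  try {
    fs.writeFileSync(lockFile, payload, { flag: "wx" }); // throws if lock exists
  } catch (error) {
    const held = JSON.parse(fs.readFileSync(lockFile, "utf8"));
    if (Date.now() - Date.parse(held.timestamp) > STALE_LOCK_MS) {
      fs.unlinkSync(lockFile); // previous holder is presumed dead
      fs.writeFileSync(lockFile, payload, { flag: "wx" });
    } else {
      throw new Error(`Failed to acquire file lock: held by pid ${held.pid}`);
    }
  }
}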
describe("Data Validation", () => {
test("should validate schedule data comprehensively", () => {
const validSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "weekly",
description: "Valid schedule",
enabled: true,
};
expect(() =>
scheduleService.validateScheduleData(validSchedule)
).not.toThrow();
});
test("should reject invalid operation types", () => {
const invalidSchedule = {
operationType: "invalid",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
};
expect(() =>
scheduleService.validateScheduleData(invalidSchedule)
).toThrow(/must be one of: update, rollback/);
});
test("should reject past dates", () => {
const pastSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() - 86400000).toISOString(), // Yesterday
recurrence: "once",
};
expect(() => scheduleService.validateScheduleData(pastSchedule)).toThrow(
/must be in the future/
);
});
test("should validate description length", () => {
const longDescription = "x".repeat(501); // Exceeds 500 char limit
const invalidSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
description: longDescription,
};
expect(() =>
scheduleService.validateScheduleData(invalidSchedule)
).toThrow(/must not exceed 500 characters/);
});
test("should prevent rollback operations from being recurring", () => {
const invalidSchedule = {
operationType: "rollback",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "daily", // Rollbacks should only be 'once'
};
expect(() =>
scheduleService.validateScheduleData(invalidSchedule)
).toThrow(/Rollback operations can only be scheduled once/);
});
});
describe("Error Recovery", () => {
test("should recover from corrupted files using backup", async () => {
// Create valid schedule first
const validSchedule = {
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
description: "Recovery test",
};
await scheduleService.addSchedule(validSchedule);
// Corrupt the main file
fs.writeFileSync(testSchedulesFile, "invalid json content");
// Recovery should work
const recovered = await scheduleService.recoverFromCorruption();
expect(Array.isArray(recovered)).toBe(true);
});
test("should create empty file when no recovery possible", async () => {
// Create corrupted file with no backup
fs.writeFileSync(testSchedulesFile, "completely invalid");
const recovered = await scheduleService.recoverFromCorruption();
expect(recovered).toEqual([]);
// Should create new empty file
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
const parsedData = JSON.parse(fileContent);
expect(parsedData.schedules).toEqual([]);
});
});
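// The recovery flow implied by the two tests above: prefer the .backup file,
// and if nothing parses, write a fresh empty store. A sketch only; the field
// names follow the persistence test earlier in this file.
function recoverFromCorruptionSketch(schedulesFile) {
  try {
    const backup = JSON.parse(
      fs.readFileSync(`${schedulesFile}.backup`, "utf8")
    );
    return backup.schedules;
  } catch (error) {
    const empty = {
      version: "1.0",
      lastModified: new Date().toISOString(),
      schedules: [],
      metadata: { totalSchedules: 0 },
    };
    fs.writeFileSync(schedulesFile, JSON.stringify(empty, null, 2));
    return [];
  }
}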
describe("State Management", () => {
test("should cleanup resources properly", async () => {
// Add some schedules and create locks
await scheduleService.addSchedule({
operationType: "update",
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
recurrence: "once",
});
await scheduleService.acquireFileLock();
// Cleanup should clear everything
await scheduleService.cleanup();
expect(scheduleService.persistenceQueue).toEqual([]);
expect(scheduleService.isProcessingQueue).toBe(false);
expect(scheduleService.isLoaded).toBe(false);
});
test("should provide system state validation", async () => {
const report = await scheduleService.validateSystemState();
expect(report).toHaveProperty("fileExists");
expect(report).toHaveProperty("fileReadable");
expect(report).toHaveProperty("fileWritable");
expect(report).toHaveProperty("dataValid");
expect(report).toHaveProperty("issues");
expect(report).toHaveProperty("recommendations");
});
test("should provide service statistics", () => {
const stats = scheduleService.getServiceStats();
expect(stats).toHaveProperty("schedulesLoaded");
expect(stats).toHaveProperty("schedulesCount");
expect(stats).toHaveProperty("activeSchedules");
expect(stats).toHaveProperty("pendingOperations");
expect(stats).toHaveProperty("memoryUsage");
});
});
describe("Atomic Operations", () => {
test("should queue multiple save operations", async () => {
const promises = [];
// Queue multiple operations simultaneously
for (let i = 0; i < 5; i++) {
const schedule = {
operationType: "update",
scheduledTime: new Date(
Date.now() + 86400000 + i * 1000
).toISOString(),
recurrence: "once",
description: `Schedule ${i}`,
};
promises.push(scheduleService.addSchedule(schedule));
}
// All should complete successfully
const results = await Promise.all(promises);
expect(results).toHaveLength(5);
// All should have unique IDs
const ids = results.map((r) => r.id);
const uniqueIds = new Set(ids);
expect(uniqueIds.size).toBe(5);
});
test("should maintain data consistency during concurrent operations", async () => {
const operations = [];
// Create multiple concurrent add/update/delete operations
for (let i = 0; i < 3; i++) {
operations.push(
scheduleService.addSchedule({
operationType: "update",
scheduledTime: new Date(
Date.now() + 86400000 + i * 1000
).toISOString(),
recurrence: "once",
description: `Concurrent ${i}`,
})
);
}
await Promise.all(operations);
// Verify all schedules were saved
const allSchedules = await scheduleService.getAllSchedules();
expect(allSchedules).toHaveLength(3);
// Verify data integrity
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
const parsedData = JSON.parse(fileContent);
expect(parsedData.metadata.totalSchedules).toBe(3);
});
});
});
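The "Atomic Operations" tests require concurrent addSchedule calls to serialize their writes rather than interleave them. A promise-chain queue is the simplest structure with that property; a sketch, noting that the real persistenceQueue seen in the cleanup test may carry more state:

class SaveQueueSketch {
  constructor() {
    this.tail = Promise.resolve();
  }
  enqueue(task) {
    // Each task starts only after the previous one settles, so two saves
    // can never write the schedules file at the same time.
    const run = this.tail.then(task, task);
    this.tail = run.catch(() => {}); // a failed save must not wedge the queue
    return run;
  }
}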


@@ -0,0 +1,256 @@
const TagAnalysisService = require("../../../src/tui/services/TagAnalysisService.js");
// Mock dependencies
const mockShopifyService = {
// Mock implementation
};
const mockProductService = {
debugFetchAllProductTags: jest.fn(),
fetchProductsByTag: jest.fn(),
};
describe("TagAnalysisService Performance Optimizations", () => {
let service;
beforeEach(() => {
service = new TagAnalysisService(mockShopifyService, mockProductService);
jest.clearAllMocks();
});
afterEach(() => {
if (service) {
service.destroy();
}
});
describe("lazy loading", () => {
it("should support paginated tag fetching", async () => {
const mockProducts = Array.from({ length: 100 }, (_, i) => ({
id: `product_${i}`,
title: `Product ${i}`,
tags: [`tag_${i % 10}`], // 10 different tags
variants: [
{
id: `variant_${i}`,
price: (i + 1) * 10,
title: `Variant ${i}`,
},
],
}));
mockProductService.debugFetchAllProductTags.mockResolvedValue(
mockProducts
);
const result = await service.fetchAllTags(100, {
page: 0,
pageSize: 5,
enableLazyLoading: true,
sortBy: "productCount",
sortOrder: "desc",
});
expect(result.tags).toHaveLength(5); // Should return only 5 tags due to pagination
expect(result.metadata.pagination).toBeDefined();
expect(result.metadata.pagination.page).toBe(0);
expect(result.metadata.pagination.pageSize).toBe(5);
expect(result.metadata.pagination.hasMore).toBe(true);
});
it("should fetch tags lazily with filtering", async () => {
const mockTags = Array.from({ length: 50 }, (_, i) => ({
tag: `tag_${i}`,
productCount: i + 1,
percentage: ((i + 1) / 50) * 100,
variantCount: (i + 1) * 2,
totalValue: (i + 1) * 100,
averagePrice: 50 + i,
priceRange: { min: 10, max: 100 },
}));
// Mock the full dataset in cache
service.cache.set("all_tags_full_dataset", {
data: mockTags,
timestamp: Date.now(),
});
const result = await service.fetchTagsLazy({
page: 0,
pageSize: 10,
searchQuery: "tag_1",
minProductCount: 5,
sortBy: "productCount",
sortOrder: "desc",
});
expect(result.tags.length).toBeLessThanOrEqual(10);
expect(result.metadata.totalItems).toBeGreaterThan(0);
expect(result.metadata.hasMore).toBeDefined();
});
});
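// The lazy path implied above: filter the cached full dataset, sort, then
// slice a single page. All names here are hypothetical except the option and
// metadata fields asserted in the tests; see the sorting sketch further down.
function pageOfTagsSketch(allTags, options) {
  const { page, pageSize, searchQuery = "", minProductCount = 0 } = options;
  const filtered = allTags.filter(
    (t) => t.tag.includes(searchQuery) && t.productCount >= minProductCount
  );
  sortTagsSketch(filtered, options.sortBy, options.sortOrder);
  const start = page * pageSize;
  return {
    tags: filtered.slice(start, start + pageSize),
    metadata: {
      totalItems: filtered.length,
      hasMore: start + pageSize < filtered.length,
    },
  };
}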
describe("caching optimizations", () => {
it("should cache tag analysis results", async () => {
const mockProducts = [
{
id: "product_1",
title: "Product 1",
tags: ["tag1", "tag2"],
variants: [{ id: "variant_1", price: "10.00" }],
},
];
mockProductService.debugFetchAllProductTags.mockResolvedValue(
mockProducts
);
// First call
const result1 = await service.fetchAllTags(10);
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
1
);
// Second call should use cache
const result2 = await service.fetchAllTags(10);
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
1
); // No additional call
expect(result1).toEqual(result2);
});
it("should track cache hit ratio", async () => {
const mockProducts = [
{
id: "product_1",
title: "Product 1",
tags: ["tag1"],
variants: [{ id: "variant_1", price: "10.00" }],
},
];
mockProductService.debugFetchAllProductTags.mockResolvedValue(
mockProducts
);
// Make multiple calls
await service.fetchAllTags(10);
await service.fetchAllTags(10); // Cache hit
await service.fetchAllTags(10); // Cache hit
const memoryStats = service.getMemoryStats();
expect(memoryStats.cacheHitRatio).toBeGreaterThan(0);
expect(memoryStats.cacheEntries).toBeGreaterThan(0);
});
it("should clean up expired cache entries", async () => {
// Add some cache entries with old timestamps
service.cache.set("old_entry", {
data: { test: "data" },
timestamp: Date.now() - 10 * 60 * 1000, // 10 minutes ago
});
service.cache.set("new_entry", {
data: { test: "data" },
timestamp: Date.now(),
});
expect(service.cache.size).toBe(2);
service.cleanup();
expect(service.cache.size).toBe(1);
expect(service.cache.has("new_entry")).toBe(true);
expect(service.cache.has("old_entry")).toBe(false);
});
});
describe("sorting optimizations", () => {
it("should sort tags by different criteria", () => {
const tags = [
{ tag: "c", productCount: 10, averagePrice: 50, totalValue: 500 },
{ tag: "a", productCount: 20, averagePrice: 30, totalValue: 600 },
{ tag: "b", productCount: 15, averagePrice: 40, totalValue: 400 },
];
// Sort by product count (desc)
service.sortTags(tags, "productCount", "desc");
expect(tags[0].tag).toBe("a"); // 20 products
// Sort by tag name (asc)
service.sortTags(tags, "tag", "asc");
expect(tags[0].tag).toBe("a"); // alphabetically first
// Sort by average price (desc)
service.sortTags(tags, "averagePrice", "desc");
expect(tags[0].tag).toBe("c"); // highest price
});
});
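// An in-place sort consistent with the three expectations above: string
// fields compare lexicographically, numeric fields numerically (sketch).
function sortTagsSketch(tags, sortBy, sortOrder) {
  const dir = sortOrder === "desc" ? -1 : 1;
  tags.sort((a, b) => {
    const x = a[sortBy];
    const y = b[sortBy];
    const cmp = typeof x === "string" ? x.localeCompare(y) : x - y;
    return dir * cmp;
  });
  return tags;
}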
describe("memory management", () => {
it("should provide memory usage statistics", () => {
const stats = service.getMemoryStats();
expect(stats).toHaveProperty("cacheEntries");
expect(stats).toHaveProperty("estimatedSizeBytes");
expect(stats).toHaveProperty("estimatedSizeMB");
expect(stats).toHaveProperty("maxEntries");
expect(stats).toHaveProperty("cacheHitRatio");
});
it("should limit cache size to prevent memory issues", async () => {
// Fill cache beyond limit
for (let i = 0; i < 60; i++) {
service.cache.set(`entry_${i}`, {
data: { large: "data".repeat(1000) },
timestamp: Date.now() - i * 1000, // Different timestamps
});
}
expect(service.cache.size).toBeGreaterThan(50);
service.cleanup();
expect(service.cache.size).toBeLessThanOrEqual(50);
});
it("should clean up resources on destroy", () => {
service.destroy();
expect(service.cache.size).toBe(0);
expect(service.cleanupInterval).toBeNull();
});
});
describe("preloading", () => {
it("should preload next page without blocking", async () => {
const mockTags = Array.from({ length: 50 }, (_, i) => ({
tag: `tag_${i}`,
productCount: i + 1,
percentage: ((i + 1) / 50) * 100,
variantCount: (i + 1) * 2,
totalValue: (i + 1) * 100,
averagePrice: 50 + i,
priceRange: { min: 10, max: 100 },
}));
// Mock the full dataset in cache
service.cache.set("all_tags_full_dataset", {
data: mockTags,
timestamp: Date.now(),
});
const options = {
page: 0,
pageSize: 10,
sortBy: "productCount",
sortOrder: "desc",
};
// Preload should not throw errors
await expect(service.preloadNextPage(options)).resolves.toBeUndefined();
});
});
});