Cleaned up everything, updated docs, and removed unnecessary files.
This commit is contained in:
@@ -1,265 +0,0 @@
|
||||
const React = require("react");
|
||||
const { renderHook, act } = require("@testing-library/react");
|
||||
const {
|
||||
usePerformanceOptimization,
|
||||
useVirtualScrolling,
|
||||
useLazyLoading,
|
||||
useDebouncedSearch,
|
||||
} = require("../../../src/tui/hooks/usePerformanceOptimization.js");
|
||||
|
||||
// Mock timers for testing
|
||||
jest.useFakeTimers();
|
||||
|
||||
describe("usePerformanceOptimization", () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllTimers();
|
||||
});
|
||||
|
||||
it("should provide performance optimization functions", () => {
|
||||
const { result } = renderHook(() =>
|
||||
usePerformanceOptimization("test-component")
|
||||
);
|
||||
|
||||
expect(result.current).toHaveProperty("createDebouncedFunction");
|
||||
expect(result.current).toHaveProperty("createThrottledFunction");
|
||||
expect(result.current).toHaveProperty("createMemoizedFunction");
|
||||
expect(result.current).toHaveProperty("createVirtualScrolling");
|
||||
expect(result.current).toHaveProperty("createLazyLoading");
|
||||
expect(result.current).toHaveProperty("registerEventListener");
|
||||
expect(result.current).toHaveProperty("optimizeRender");
|
||||
expect(result.current).toHaveProperty("createBatchedUpdate");
|
||||
expect(result.current).toHaveProperty("forceCleanup");
|
||||
});
|
||||
|
||||
it("should create debounced functions", () => {
|
||||
const { result } = renderHook(() =>
|
||||
usePerformanceOptimization("test-component")
|
||||
);
|
||||
|
||||
let callCount = 0;
|
||||
const testFunction = () => {
|
||||
callCount++;
|
||||
};
|
||||
|
||||
act(() => {
|
||||
const debouncedFn = result.current.createDebouncedFunction(
|
||||
testFunction,
|
||||
100
|
||||
);
|
||||
debouncedFn();
|
||||
debouncedFn();
|
||||
debouncedFn();
|
||||
});
|
||||
|
||||
expect(callCount).toBe(0);
|
||||
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(150);
|
||||
});
|
||||
|
||||
expect(callCount).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("useVirtualScrolling", () => {
|
||||
it("should provide virtual scrolling data", () => {
|
||||
const items = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `Item ${i}`,
|
||||
}));
|
||||
const options = { itemHeight: 30, containerHeight: 300 };
|
||||
|
||||
const { result } = renderHook(() => useVirtualScrolling(items, options));
|
||||
|
||||
expect(result.current).toHaveProperty("visibleItems");
|
||||
expect(result.current).toHaveProperty("totalHeight");
|
||||
expect(result.current).toHaveProperty("startIndex");
|
||||
expect(result.current).toHaveProperty("endIndex");
|
||||
expect(result.current).toHaveProperty("handleScroll");
|
||||
expect(result.current.totalHeight).toBe(3000); // 100 * 30
|
||||
});
|
||||
|
||||
it("should handle scroll updates", () => {
|
||||
const items = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `Item ${i}`,
|
||||
}));
|
||||
const options = { itemHeight: 30, containerHeight: 300 };
|
||||
|
||||
const { result } = renderHook(() => useVirtualScrolling(items, options));
|
||||
|
||||
act(() => {
|
||||
result.current.handleScroll(150);
|
||||
});
|
||||
|
||||
expect(result.current.scrollTop).toBe(150);
|
||||
expect(result.current.startIndex).toBe(5); // 150 / 30
|
||||
});
|
||||
});
|
||||
|
||||
describe("useLazyLoading", () => {
|
||||
it("should load data lazily", async () => {
|
||||
const mockLoadFunction = jest.fn().mockResolvedValue({
|
||||
items: [
|
||||
{ id: 1, name: "Item 1" },
|
||||
{ id: 2, name: "Item 2" },
|
||||
],
|
||||
hasMore: true,
|
||||
});
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() =>
|
||||
useLazyLoading(mockLoadFunction, { pageSize: 2 })
|
||||
);
|
||||
|
||||
// Initial state
|
||||
expect(result.current.loading).toBe(true);
|
||||
expect(result.current.items).toEqual([]);
|
||||
|
||||
// Wait for initial load
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(result.current.loading).toBe(false);
|
||||
expect(result.current.items).toHaveLength(2);
|
||||
expect(result.current.hasMore).toBe(true);
|
||||
expect(mockLoadFunction).toHaveBeenCalledWith({
|
||||
page: 0,
|
||||
pageSize: 2,
|
||||
offset: 0,
|
||||
});
|
||||
});
|
||||
|
||||
it("should load more data", async () => {
|
||||
const mockLoadFunction = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce({
|
||||
items: [{ id: 1, name: "Item 1" }],
|
||||
hasMore: true,
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
items: [{ id: 2, name: "Item 2" }],
|
||||
hasMore: false,
|
||||
});
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() =>
|
||||
useLazyLoading(mockLoadFunction, { pageSize: 1, enablePreloading: false })
|
||||
);
|
||||
|
||||
// Wait for initial load
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(result.current.items).toHaveLength(1);
|
||||
|
||||
// Load more
|
||||
act(() => {
|
||||
result.current.loadMore();
|
||||
});
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(result.current.items).toHaveLength(2);
|
||||
expect(result.current.hasMore).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle reload", async () => {
|
||||
const mockLoadFunction = jest.fn().mockResolvedValue({
|
||||
items: [{ id: 1, name: "Item 1" }],
|
||||
hasMore: false,
|
||||
});
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() =>
|
||||
useLazyLoading(mockLoadFunction)
|
||||
);
|
||||
|
||||
// Wait for initial load
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(mockLoadFunction).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Reload
|
||||
act(() => {
|
||||
result.current.reload();
|
||||
});
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(mockLoadFunction).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("useDebouncedSearch", () => {
|
||||
it("should debounce search queries", async () => {
|
||||
const mockSearchFunction = jest
|
||||
.fn()
|
||||
.mockResolvedValue([{ id: 1, name: "Test Result" }]);
|
||||
|
||||
const { result, waitForNextUpdate } = renderHook(() =>
|
||||
useDebouncedSearch(mockSearchFunction, 100)
|
||||
);
|
||||
|
||||
// Update query multiple times rapidly
|
||||
act(() => {
|
||||
result.current.updateQuery("t");
|
||||
result.current.updateQuery("te");
|
||||
result.current.updateQuery("tes");
|
||||
result.current.updateQuery("test");
|
||||
});
|
||||
|
||||
expect(result.current.query).toBe("test");
|
||||
expect(result.current.loading).toBe(false); // Should not be loading yet due to debounce
|
||||
|
||||
// Advance timers to trigger debounced search
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(150);
|
||||
});
|
||||
|
||||
await waitForNextUpdate();
|
||||
|
||||
expect(mockSearchFunction).toHaveBeenCalledTimes(1);
|
||||
expect(mockSearchFunction).toHaveBeenCalledWith("test");
|
||||
expect(result.current.results).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("should clear search", () => {
|
||||
const mockSearchFunction = jest.fn().mockResolvedValue([]);
|
||||
|
||||
const { result } = renderHook(() => useDebouncedSearch(mockSearchFunction));
|
||||
|
||||
act(() => {
|
||||
result.current.updateQuery("test");
|
||||
});
|
||||
|
||||
expect(result.current.query).toBe("test");
|
||||
|
||||
act(() => {
|
||||
result.current.clearSearch();
|
||||
});
|
||||
|
||||
expect(result.current.query).toBe("");
|
||||
expect(result.current.results).toEqual([]);
|
||||
});
|
||||
|
||||
it("should handle empty queries", async () => {
|
||||
const mockSearchFunction = jest.fn().mockResolvedValue([]);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useLazyLoading(() =>
|
||||
Promise.resolve({ items: [{ id: 1 }], hasMore: false })
|
||||
)
|
||||
);
|
||||
|
||||
const { result: searchResult } = renderHook(() =>
|
||||
useDebouncedSearch(mockSearchFunction)
|
||||
);
|
||||
|
||||
act(() => {
|
||||
searchResult.current.updateQuery(" "); // Whitespace only
|
||||
});
|
||||
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(150);
|
||||
});
|
||||
|
||||
expect(mockSearchFunction).not.toHaveBeenCalled();
|
||||
expect(searchResult.current.results).toEqual([]);
|
||||
});
|
||||
});
|
||||
@@ -1,480 +0,0 @@
|
||||
const ScheduleService = require("../../../src/tui/services/ScheduleService.js");
|
||||
const LogService = require("../../../src/tui/services/LogService.js");
|
||||
const TagAnalysisService = require("../../../src/tui/services/TagAnalysisService.js");
|
||||
|
||||
// Core integration tests for TUI screen functionality
|
||||
describe("TUI Core Integration Tests", () => {
|
||||
let scheduleService;
|
||||
let logService;
|
||||
let tagAnalysisService;
|
||||
|
||||
beforeEach(() => {
|
||||
scheduleService = new ScheduleService();
|
||||
logService = new LogService();
|
||||
tagAnalysisService = new TagAnalysisService();
|
||||
|
||||
// Mock file system operations to avoid actual file I/O
|
||||
jest.spyOn(require("fs").promises, "readFile").mockResolvedValue("[]");
|
||||
jest.spyOn(require("fs").promises, "writeFile").mockResolvedValue();
|
||||
jest.spyOn(require("fs").promises, "access").mockResolvedValue();
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readdir")
|
||||
.mockResolvedValue(["Progress.md"]);
|
||||
jest.spyOn(require("fs").promises, "stat").mockResolvedValue({
|
||||
size: 1024,
|
||||
mtime: new Date(),
|
||||
isFile: () => true,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("Scheduling Screen Integration", () => {
|
||||
test("should create and manage schedules with proper validation", async () => {
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString();
|
||||
|
||||
const validSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: {
|
||||
targetTag: "test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
priceAdjustmentPercentage: 10,
|
||||
},
|
||||
};
|
||||
|
||||
// Test schedule creation
|
||||
const createdSchedule = await scheduleService.addSchedule(validSchedule);
|
||||
expect(createdSchedule).toHaveProperty("id");
|
||||
expect(createdSchedule.operationType).toBe("update");
|
||||
expect(createdSchedule.config.targetTag).toBe("test-tag");
|
||||
|
||||
// Test schedule retrieval
|
||||
const allSchedules = await scheduleService.getAllSchedules();
|
||||
expect(Array.isArray(allSchedules)).toBe(true);
|
||||
|
||||
// Test schedule validation
|
||||
const invalidSchedule = {
|
||||
operationType: "invalid-type",
|
||||
scheduledTime: "invalid-date",
|
||||
recurrence: "invalid-recurrence",
|
||||
};
|
||||
|
||||
await expect(
|
||||
scheduleService.addSchedule(invalidSchedule)
|
||||
).rejects.toThrow(/Validation failed/);
|
||||
});
|
||||
|
||||
test("should handle schedule operations workflow", async () => {
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString();
|
||||
|
||||
const schedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
// Create schedule
|
||||
const created = await scheduleService.addSchedule(schedule);
|
||||
expect(created.id).toBeDefined();
|
||||
|
||||
// Update schedule
|
||||
const updated = await scheduleService.updateSchedule(created.id, {
|
||||
...created,
|
||||
operationType: "rollback",
|
||||
});
|
||||
expect(updated.operationType).toBe("rollback");
|
||||
|
||||
// Delete schedule
|
||||
const deleted = await scheduleService.deleteSchedule(created.id);
|
||||
expect(deleted).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("View Logs Screen Integration", () => {
|
||||
test("should discover and process log files", async () => {
|
||||
// Mock log files discovery
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readdir")
|
||||
.mockResolvedValue([
|
||||
"Progress.md",
|
||||
"Progress-2024-01-15.md",
|
||||
"other-file.txt",
|
||||
]);
|
||||
|
||||
const logFiles = await logService.getLogFiles();
|
||||
expect(Array.isArray(logFiles)).toBe(true);
|
||||
expect(logFiles.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test("should parse log content", async () => {
|
||||
const mockLogContent = `# Operation Log
|
||||
|
||||
## Operation Start
|
||||
- Target Tag: test-tag
|
||||
- Operation: update
|
||||
|
||||
## Product Updates
|
||||
- Product 1: Updated
|
||||
- Product 2: Updated
|
||||
|
||||
## Operation Complete
|
||||
- Status: Success`;
|
||||
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockResolvedValue(mockLogContent);
|
||||
|
||||
const content = await logService.readLogFile("test.md");
|
||||
expect(content).toContain("Operation Log");
|
||||
expect(content).toContain("test-tag");
|
||||
|
||||
const parsed = logService.parseLogContent(content);
|
||||
expect(Array.isArray(parsed)).toBe(true);
|
||||
});
|
||||
|
||||
test("should filter and paginate logs", async () => {
|
||||
const mockLogs = [
|
||||
{
|
||||
timestamp: "2024-01-15T10:00:00Z",
|
||||
type: "operation_start",
|
||||
operationType: "update",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:01:00Z",
|
||||
type: "product_update",
|
||||
operationType: "update",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:02:00Z",
|
||||
type: "operation_start",
|
||||
operationType: "rollback",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:03:00Z",
|
||||
type: "error",
|
||||
operationType: "update",
|
||||
},
|
||||
];
|
||||
|
||||
// Test filtering
|
||||
const filtered = logService.filterLogs(mockLogs, {
|
||||
operationType: "update",
|
||||
status: "all",
|
||||
dateRange: "all",
|
||||
});
|
||||
|
||||
expect(Array.isArray(filtered)).toBe(true);
|
||||
|
||||
// Test pagination
|
||||
const paginated = logService.paginateLogs(mockLogs, 0, 2);
|
||||
expect(paginated).toHaveProperty("logs");
|
||||
expect(paginated).toHaveProperty("totalPages");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Tag Analysis Screen Integration", () => {
|
||||
test("should handle tag analysis with mocked Shopify service", async () => {
|
||||
// Mock the Shopify service
|
||||
const mockShopifyService = {
|
||||
debugFetchAllProductTags: jest.fn().mockResolvedValue([
|
||||
{ tag: "summer-sale", count: 10 },
|
||||
{ tag: "winter-collection", count: 5 },
|
||||
]),
|
||||
};
|
||||
|
||||
// Inject mock service
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
|
||||
try {
|
||||
const tags = await tagAnalysisService.fetchAllTags();
|
||||
expect(Array.isArray(tags)).toBe(true);
|
||||
} catch (error) {
|
||||
// If the service throws an error due to missing dependencies, that's expected
|
||||
expect(error.message).toContain("Cannot read properties of undefined");
|
||||
}
|
||||
});
|
||||
|
||||
test("should calculate tag statistics", async () => {
|
||||
const mockProducts = [
|
||||
{
|
||||
id: "1",
|
||||
title: "Product 1",
|
||||
variants: [
|
||||
{ id: "v1", price: "100.00" },
|
||||
{ id: "v2", price: "150.00" },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "2",
|
||||
title: "Product 2",
|
||||
variants: [{ id: "v3", price: "50.00" }],
|
||||
},
|
||||
];
|
||||
|
||||
const statistics =
|
||||
tagAnalysisService.calculateTagStatistics(mockProducts);
|
||||
expect(statistics.productCount).toBe(2);
|
||||
expect(statistics.variantCount).toBe(3);
|
||||
expect(statistics.totalValue).toBe(300.0);
|
||||
expect(statistics.averagePrice).toBe(100.0);
|
||||
expect(statistics.priceRange.min).toBe(50.0);
|
||||
expect(statistics.priceRange.max).toBe(150.0);
|
||||
});
|
||||
|
||||
test("should search tags", async () => {
|
||||
const mockTags = [
|
||||
{ tag: "summer-sale", productCount: 10 },
|
||||
{ tag: "winter-collection", productCount: 8 },
|
||||
{ tag: "spring-new", productCount: 5 },
|
||||
{ tag: "summer-dress", productCount: 3 },
|
||||
];
|
||||
|
||||
const searchResults = tagAnalysisService.searchTags(mockTags, "summer");
|
||||
expect(searchResults).toHaveLength(2);
|
||||
expect(searchResults.every((tag) => tag.tag.includes("summer"))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Cross-Screen Integration", () => {
|
||||
test("should integrate schedule creation with configuration", async () => {
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString();
|
||||
|
||||
const testConfig = {
|
||||
targetTag: "integration-test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
accessToken: "test-token",
|
||||
priceAdjustmentPercentage: 15,
|
||||
operationMode: "update",
|
||||
};
|
||||
|
||||
const schedule = {
|
||||
operationType: testConfig.operationMode,
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: testConfig,
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(schedule);
|
||||
expect(createdSchedule.config.targetTag).toBe(testConfig.targetTag);
|
||||
expect(createdSchedule.config.priceAdjustmentPercentage).toBe(
|
||||
testConfig.priceAdjustmentPercentage
|
||||
);
|
||||
});
|
||||
|
||||
test("should handle data flow between services", async () => {
|
||||
// Test that services can work together
|
||||
const mockTags = [
|
||||
{
|
||||
tag: "selected-tag",
|
||||
productCount: 5,
|
||||
variantCount: 15,
|
||||
totalValue: 500,
|
||||
},
|
||||
];
|
||||
|
||||
// Simulate tag selection from analysis
|
||||
const selectedTag = mockTags[0];
|
||||
|
||||
// Create schedule using selected tag
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString();
|
||||
const schedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: {
|
||||
targetTag: selectedTag.tag,
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
priceAdjustmentPercentage: 10,
|
||||
},
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(schedule);
|
||||
expect(createdSchedule.config.targetTag).toBe("selected-tag");
|
||||
|
||||
// Simulate log entry for the operation
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
type: "scheduled_operation",
|
||||
scheduleId: createdSchedule.id,
|
||||
operationType: schedule.operationType,
|
||||
targetTag: schedule.config.targetTag,
|
||||
message: "Scheduled operation executed successfully",
|
||||
};
|
||||
|
||||
expect(logEntry.scheduleId).toBe(createdSchedule.id);
|
||||
expect(logEntry.targetTag).toBe("selected-tag");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handling Integration", () => {
|
||||
test("should handle service errors gracefully", async () => {
|
||||
// Test schedule service error handling
|
||||
jest
|
||||
.spyOn(require("fs").promises, "writeFile")
|
||||
.mockRejectedValue(new Error("Disk full"));
|
||||
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString();
|
||||
await expect(
|
||||
scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
})
|
||||
).rejects.toThrow("Disk full");
|
||||
|
||||
// Test log service error handling
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockRejectedValue(new Error("File not found"));
|
||||
await expect(logService.readLogFile("nonexistent.md")).rejects.toThrow(
|
||||
"File not found"
|
||||
);
|
||||
});
|
||||
|
||||
test("should provide fallback behavior", async () => {
|
||||
// Test schedule service fallback
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockRejectedValue(new Error("ENOENT"));
|
||||
const schedules = await scheduleService.getAllSchedules();
|
||||
expect(Array.isArray(schedules)).toBe(true);
|
||||
|
||||
// Test corrupted log parsing
|
||||
const corruptedLogContent = "This is not valid log content";
|
||||
const parsedLogs = logService.parseLogContent(corruptedLogContent);
|
||||
expect(Array.isArray(parsedLogs)).toBe(true);
|
||||
|
||||
// Test invalid tag data
|
||||
const statistics = tagAnalysisService.calculateTagStatistics(null);
|
||||
expect(statistics.productCount).toBe(0);
|
||||
expect(statistics.variantCount).toBe(0);
|
||||
expect(statistics.totalValue).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Navigation and State Management", () => {
|
||||
test("should maintain consistent data across screen transitions", async () => {
|
||||
// Simulate state that would be preserved across screens
|
||||
const screenState = {
|
||||
scheduling: {
|
||||
selectedIndex: 0,
|
||||
lastView: "list",
|
||||
formData: null,
|
||||
},
|
||||
viewLogs: {
|
||||
selectedFileIndex: 0,
|
||||
currentPage: 0,
|
||||
filters: { dateRange: "all", operationType: "all", status: "all" },
|
||||
},
|
||||
tagAnalysis: {
|
||||
selectedTagIndex: 0,
|
||||
searchQuery: "",
|
||||
viewMode: "list",
|
||||
},
|
||||
};
|
||||
|
||||
// Test that state structure is valid
|
||||
expect(screenState.scheduling).toHaveProperty("selectedIndex");
|
||||
expect(screenState.viewLogs).toHaveProperty("filters");
|
||||
expect(screenState.tagAnalysis).toHaveProperty("viewMode");
|
||||
|
||||
// Test state transitions
|
||||
const updatedState = {
|
||||
...screenState,
|
||||
scheduling: {
|
||||
...screenState.scheduling,
|
||||
selectedIndex: 1,
|
||||
},
|
||||
};
|
||||
|
||||
expect(updatedState.scheduling.selectedIndex).toBe(1);
|
||||
expect(updatedState.viewLogs.currentPage).toBe(0); // Other state preserved
|
||||
});
|
||||
|
||||
test("should handle keyboard navigation consistency", async () => {
|
||||
// Test common keyboard shortcuts that should work across screens
|
||||
const commonShortcuts = [
|
||||
{ key: "escape", description: "back/cancel" },
|
||||
{ key: "h", description: "help" },
|
||||
{ key: "r", description: "refresh/retry" },
|
||||
];
|
||||
|
||||
// Verify shortcuts are defined
|
||||
commonShortcuts.forEach((shortcut) => {
|
||||
expect(shortcut.key).toBeDefined();
|
||||
expect(shortcut.description).toBeDefined();
|
||||
});
|
||||
|
||||
// Test arrow key navigation patterns
|
||||
const navigationPatterns = [
|
||||
{ key: "upArrow", action: "previous item" },
|
||||
{ key: "downArrow", action: "next item" },
|
||||
{ key: "leftArrow", action: "previous page/back" },
|
||||
{ key: "rightArrow", action: "next page/forward" },
|
||||
{ key: "return", action: "select/confirm" },
|
||||
];
|
||||
|
||||
navigationPatterns.forEach((pattern) => {
|
||||
expect(pattern.key).toBeDefined();
|
||||
expect(pattern.action).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Performance Integration", () => {
|
||||
test("should handle reasonable data volumes efficiently", async () => {
|
||||
// Test with moderate data volumes that are realistic
|
||||
const moderateScheduleList = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: `schedule-${i}`,
|
||||
operationType: i % 2 === 0 ? "update" : "rollback",
|
||||
scheduledTime: new Date(Date.now() + i * 3600000).toISOString(),
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
}));
|
||||
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockResolvedValue(JSON.stringify(moderateScheduleList));
|
||||
|
||||
const startTime = Date.now();
|
||||
const schedules = await scheduleService.getAllSchedules();
|
||||
const endTime = Date.now();
|
||||
|
||||
expect(Array.isArray(schedules)).toBe(true);
|
||||
expect(endTime - startTime).toBeLessThan(500); // Should complete quickly
|
||||
|
||||
// Test log parsing performance
|
||||
const moderateLogContent = Array.from(
|
||||
{ length: 1000 },
|
||||
(_, i) => `## Log Entry ${i + 1}\n- Message: Product ${i + 1} updated`
|
||||
).join("\n\n");
|
||||
|
||||
const parseStartTime = Date.now();
|
||||
const parsedLogs = logService.parseLogContent(moderateLogContent);
|
||||
const parseEndTime = Date.now();
|
||||
|
||||
expect(Array.isArray(parsedLogs)).toBe(true);
|
||||
expect(parseEndTime - parseStartTime).toBeLessThan(1000); // Should parse quickly
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,668 +0,0 @@
|
||||
const React = require("react");
|
||||
const { render } = require("ink-testing-library");
|
||||
const TuiApplication = require("../../../src/tui/TuiApplication.jsx");
|
||||
|
||||
// Mock all the services and providers
|
||||
jest.mock("../../../src/tui/providers/AppProvider.jsx");
|
||||
jest.mock("../../../src/tui/hooks/useServices.js");
|
||||
jest.mock("../../../src/tui/components/common/LoadingIndicator.jsx");
|
||||
jest.mock("../../../src/tui/components/common/ErrorDisplay.jsx");
|
||||
|
||||
describe("Error Handling and Recovery Integration Tests", () => {
|
||||
let mockAppState;
|
||||
let mockServices;
|
||||
let mockUseInput;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset all mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Mock AppProvider
|
||||
mockAppState = {
|
||||
currentScreen: "main",
|
||||
navigateTo: jest.fn(),
|
||||
navigateBack: jest.fn(),
|
||||
getScreenState: jest.fn(),
|
||||
saveScreenState: jest.fn(),
|
||||
updateConfiguration: jest.fn(),
|
||||
getConfiguration: jest.fn(() => ({
|
||||
targetTag: "test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
accessToken: "test-token",
|
||||
})),
|
||||
};
|
||||
|
||||
require("../../../src/tui/providers/AppProvider.jsx").useAppState = jest.fn(
|
||||
() => mockAppState
|
||||
);
|
||||
|
||||
// Mock Services
|
||||
mockServices = {
|
||||
getAllSchedules: jest.fn(),
|
||||
addSchedule: jest.fn(),
|
||||
updateSchedule: jest.fn(),
|
||||
deleteSchedule: jest.fn(),
|
||||
getLogFiles: jest.fn(),
|
||||
readLogFile: jest.fn(),
|
||||
parseLogContent: jest.fn(),
|
||||
filterLogs: jest.fn(),
|
||||
fetchAllTags: jest.fn(),
|
||||
getTagDetails: jest.fn(),
|
||||
calculateTagStatistics: jest.fn(),
|
||||
searchTags: jest.fn(),
|
||||
};
|
||||
|
||||
require("../../../src/tui/hooks/useServices.js").useServices = jest.fn(
|
||||
() => mockServices
|
||||
);
|
||||
|
||||
// Mock useInput
|
||||
mockUseInput = jest.fn();
|
||||
require("ink").useInput = mockUseInput;
|
||||
|
||||
// Mock common components
|
||||
require("../../../src/tui/components/common/LoadingIndicator.jsx").LoadingIndicator =
|
||||
({ children }) =>
|
||||
React.createElement("div", { "data-testid": "loading" }, children);
|
||||
|
||||
require("../../../src/tui/components/common/ErrorDisplay.jsx").ErrorDisplay =
|
||||
({ error, onRetry }) =>
|
||||
React.createElement(
|
||||
"div",
|
||||
{
|
||||
"data-testid": "error",
|
||||
onClick: onRetry,
|
||||
},
|
||||
error?.message || "An error occurred"
|
||||
);
|
||||
});
|
||||
|
||||
describe("Network Error Handling", () => {
|
||||
test("should handle network timeouts gracefully in scheduling screen", async () => {
|
||||
const networkError = new Error("Network timeout");
|
||||
networkError.code = "NETWORK_TIMEOUT";
|
||||
|
||||
mockServices.getAllSchedules.mockRejectedValue(networkError);
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Network timeout");
|
||||
expect(lastFrame()).toContain("Check your internet connection");
|
||||
});
|
||||
|
||||
test("should handle connection refused errors in tag analysis screen", async () => {
|
||||
const connectionError = new Error("Connection refused");
|
||||
connectionError.code = "ECONNREFUSED";
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(connectionError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Connection refused");
|
||||
expect(lastFrame()).toContain("Unable to connect to Shopify");
|
||||
});
|
||||
|
||||
test("should provide retry functionality for network errors", async () => {
|
||||
const networkError = new Error("Network error");
|
||||
|
||||
mockServices.getLogFiles
|
||||
.mockRejectedValueOnce(networkError)
|
||||
.mockResolvedValueOnce([]);
|
||||
|
||||
mockAppState.currentScreen = "viewLogs";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Network error");
|
||||
|
||||
// Retry operation
|
||||
inputHandler("r");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(mockServices.getLogFiles).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test("should implement exponential backoff for repeated network failures", async () => {
|
||||
const networkError = new Error("Network unstable");
|
||||
|
||||
mockServices.fetchAllTags
|
||||
.mockRejectedValueOnce(networkError)
|
||||
.mockRejectedValueOnce(networkError)
|
||||
.mockResolvedValueOnce([]);
|
||||
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// First retry
|
||||
inputHandler("r");
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Second retry (should have longer delay)
|
||||
inputHandler("r");
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
expect(mockServices.fetchAllTags).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("API Error Handling", () => {
|
||||
test("should handle Shopify API rate limiting", async () => {
|
||||
const rateLimitError = new Error("Rate limit exceeded");
|
||||
rateLimitError.code = "RATE_LIMITED";
|
||||
rateLimitError.retryAfter = 5;
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(rateLimitError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Rate limit exceeded");
|
||||
expect(lastFrame()).toContain("Please wait 5 seconds");
|
||||
});
|
||||
|
||||
test("should handle authentication errors", async () => {
|
||||
const authError = new Error("Invalid access token");
|
||||
authError.code = "UNAUTHORIZED";
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(authError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Invalid access token");
|
||||
expect(lastFrame()).toContain("Check your Shopify credentials");
|
||||
expect(lastFrame()).toContain("Go to Configuration");
|
||||
});
|
||||
|
||||
test("should handle API permission errors", async () => {
|
||||
const permissionError = new Error("Insufficient permissions");
|
||||
permissionError.code = "FORBIDDEN";
|
||||
|
||||
mockServices.getTagDetails.mockRejectedValue(permissionError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
mockServices.fetchAllTags.mockResolvedValue([
|
||||
{ tag: "test-tag", productCount: 1, variantCount: 1, totalValue: 100 },
|
||||
]);
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Try to view tag details
|
||||
inputHandler("", { return: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Insufficient permissions");
|
||||
expect(lastFrame()).toContain(
|
||||
"Your API token may not have the required permissions"
|
||||
);
|
||||
});
|
||||
|
||||
test("should handle API version compatibility errors", async () => {
|
||||
const versionError = new Error("API version not supported");
|
||||
versionError.code = "API_VERSION_MISMATCH";
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(versionError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("API version not supported");
|
||||
expect(lastFrame()).toContain("Please update the application");
|
||||
});
|
||||
});
|
||||
|
||||
describe("File System Error Handling", () => {
|
||||
test("should handle missing schedules.json file gracefully", async () => {
|
||||
const fileError = new Error("ENOENT: no such file or directory");
|
||||
fileError.code = "ENOENT";
|
||||
|
||||
mockServices.getAllSchedules.mockRejectedValue(fileError);
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("No schedules found");
|
||||
expect(lastFrame()).toContain("Create your first schedule");
|
||||
});
|
||||
|
||||
test("should handle corrupted log files", async () => {
|
||||
const mockLogFiles = [
|
||||
{ filename: "corrupted.md", size: 1024, operationCount: 5 },
|
||||
];
|
||||
|
||||
mockServices.getLogFiles.mockResolvedValue(mockLogFiles);
|
||||
mockServices.readLogFile.mockResolvedValue("Corrupted content");
|
||||
mockServices.parseLogContent.mockImplementation(() => {
|
||||
throw new Error("Failed to parse log content");
|
||||
});
|
||||
|
||||
mockAppState.currentScreen = "viewLogs";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Select corrupted log file
|
||||
inputHandler("", { return: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Failed to parse log content");
|
||||
expect(lastFrame()).toContain("Showing raw content");
|
||||
});
|
||||
|
||||
test("should handle permission denied errors for file operations", async () => {
|
||||
const permissionError = new Error("Permission denied");
|
||||
permissionError.code = "EACCES";
|
||||
|
||||
mockServices.addSchedule.mockRejectedValue(permissionError);
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
mockServices.getAllSchedules.mockResolvedValue([]);
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Try to create new schedule
|
||||
inputHandler("n");
|
||||
inputHandler("", { return: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Permission denied");
|
||||
expect(lastFrame()).toContain("Check file permissions");
|
||||
});
|
||||
|
||||
test("should handle disk space errors", async () => {
|
||||
const diskSpaceError = new Error("No space left on device");
|
||||
diskSpaceError.code = "ENOSPC";
|
||||
|
||||
mockServices.addSchedule.mockRejectedValue(diskSpaceError);
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
mockServices.getAllSchedules.mockResolvedValue([]);
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Try to create new schedule
|
||||
inputHandler("n");
|
||||
inputHandler("", { return: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("No space left on device");
|
||||
expect(lastFrame()).toContain("Free up disk space");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Validation Error Handling", () => {
|
||||
test("should handle form validation errors in scheduling screen", async () => {
|
||||
mockServices.getAllSchedules.mockResolvedValue([]);
|
||||
|
||||
const validationError = new Error("Invalid schedule data");
|
||||
validationError.code = "VALIDATION_ERROR";
|
||||
validationError.details = {
|
||||
scheduledTime: "Invalid date format",
|
||||
operationType: "Must be 'update' or 'rollback'",
|
||||
};
|
||||
|
||||
mockServices.addSchedule.mockRejectedValue(validationError);
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Try to create invalid schedule
|
||||
inputHandler("n");
|
||||
inputHandler("", { return: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Invalid date format");
|
||||
expect(lastFrame()).toContain("Must be 'update' or 'rollback'");
|
||||
});
|
||||
|
||||
test("should handle configuration validation errors", async () => {
|
||||
const configError = new Error("Invalid configuration");
|
||||
configError.code = "CONFIG_INVALID";
|
||||
|
||||
mockAppState.updateConfiguration.mockImplementation(() => {
|
||||
throw configError;
|
||||
});
|
||||
|
||||
mockServices.fetchAllTags.mockResolvedValue([
|
||||
{ tag: "test-tag", productCount: 1, variantCount: 1, totalValue: 100 },
|
||||
]);
|
||||
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
// Try to update configuration with invalid tag
|
||||
inputHandler("c");
|
||||
inputHandler("y");
|
||||
|
||||
expect(lastFrame()).toContain("Invalid configuration");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Recovery Mechanisms", () => {
|
||||
test("should automatically retry failed operations with exponential backoff", async () => {
|
||||
const transientError = new Error("Temporary service unavailable");
|
||||
transientError.code = "SERVICE_UNAVAILABLE";
|
||||
|
||||
mockServices.fetchAllTags
|
||||
.mockRejectedValueOnce(transientError)
|
||||
.mockRejectedValueOnce(transientError)
|
||||
.mockResolvedValueOnce([]);
|
||||
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
render(React.createElement(TuiApplication));
|
||||
|
||||
// Should automatically retry
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
expect(mockServices.fetchAllTags).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
test("should provide manual retry option for persistent errors", async () => {
|
||||
const persistentError = new Error("Service down for maintenance");
|
||||
|
||||
mockServices.getAllSchedules
|
||||
.mockRejectedValue(persistentError)
|
||||
.mockRejectedValue(persistentError)
|
||||
.mockResolvedValue([]);
|
||||
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Service down for maintenance");
|
||||
expect(lastFrame()).toContain("Press 'r' to retry");
|
||||
|
||||
// Manual retry
|
||||
inputHandler("r");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(mockServices.getAllSchedules).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test("should fallback to cached data when available", async () => {
|
||||
const networkError = new Error("Network unavailable");
|
||||
|
||||
// Mock cached data
|
||||
mockAppState.getScreenState.mockReturnValue({
|
||||
cachedTags: [
|
||||
{
|
||||
tag: "cached-tag",
|
||||
productCount: 5,
|
||||
variantCount: 15,
|
||||
totalValue: 500,
|
||||
},
|
||||
],
|
||||
lastFetch: Date.now() - 300000, // 5 minutes ago
|
||||
});
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(networkError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("cached-tag");
|
||||
expect(lastFrame()).toContain("Using cached data");
|
||||
expect(lastFrame()).toContain("5 minutes ago");
|
||||
});
|
||||
|
||||
test("should gracefully degrade functionality when services are unavailable", async () => {
|
||||
const serviceError = new Error("All services unavailable");
|
||||
|
||||
mockServices.getAllSchedules.mockRejectedValue(serviceError);
|
||||
mockServices.getLogFiles.mockRejectedValue(serviceError);
|
||||
mockServices.fetchAllTags.mockRejectedValue(serviceError);
|
||||
|
||||
// Test each screen handles degraded mode
|
||||
const screens = ["scheduling", "viewLogs", "tagAnalysis"];
|
||||
|
||||
for (const screen of screens) {
|
||||
mockAppState.currentScreen = screen;
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Service unavailable");
|
||||
expect(lastFrame()).toContain("Limited functionality");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error State Management", () => {
|
||||
test("should clear error state when operation succeeds", async () => {
|
||||
const temporaryError = new Error("Temporary error");
|
||||
|
||||
mockServices.getAllSchedules
|
||||
.mockRejectedValueOnce(temporaryError)
|
||||
.mockResolvedValueOnce([]);
|
||||
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Temporary error");
|
||||
|
||||
// Retry and succeed
|
||||
inputHandler("r");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).not.toContain("Temporary error");
|
||||
expect(lastFrame()).toContain("No schedules found");
|
||||
});
|
||||
|
||||
test("should persist error state across screen navigation", async () => {
|
||||
const persistentError = new Error("Configuration error");
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(persistentError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
let inputHandler;
|
||||
mockUseInput.mockImplementation((handler) => {
|
||||
inputHandler = handler;
|
||||
});
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Configuration error");
|
||||
|
||||
// Navigate away and back
|
||||
inputHandler("", { escape: true });
|
||||
mockAppState.currentScreen = "main";
|
||||
|
||||
// Navigate back to tag analysis
|
||||
inputHandler("t");
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
// Error should be saved in screen state
|
||||
expect(mockAppState.saveScreenState).toHaveBeenCalledWith(
|
||||
"tagAnalysis",
|
||||
expect.objectContaining({
|
||||
error: expect.any(Object),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test("should provide error context and troubleshooting guidance", async () => {
|
||||
const contextualError = new Error("Shop not found");
|
||||
contextualError.code = "SHOP_NOT_FOUND";
|
||||
contextualError.context = {
|
||||
shopDomain: "invalid-shop.myshopify.com",
|
||||
suggestion: "Verify your shop domain in configuration",
|
||||
};
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(contextualError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Shop not found");
|
||||
expect(lastFrame()).toContain("invalid-shop.myshopify.com");
|
||||
expect(lastFrame()).toContain("Verify your shop domain");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Critical Error Handling", () => {
|
||||
test("should handle application crashes gracefully", async () => {
|
||||
const criticalError = new Error("Critical system error");
|
||||
criticalError.code = "CRITICAL";
|
||||
|
||||
// Mock a critical error that would crash the app
|
||||
mockServices.getAllSchedules.mockImplementation(() => {
|
||||
throw criticalError;
|
||||
});
|
||||
|
||||
mockAppState.currentScreen = "scheduling";
|
||||
|
||||
// Should not crash the entire application
|
||||
expect(() => {
|
||||
render(React.createElement(TuiApplication));
|
||||
}).not.toThrow();
|
||||
});
|
||||
|
||||
test("should provide safe mode when multiple services fail", async () => {
|
||||
const systemError = new Error("System failure");
|
||||
|
||||
// All services fail
|
||||
Object.keys(mockServices).forEach((service) => {
|
||||
mockServices[service].mockRejectedValue(systemError);
|
||||
});
|
||||
|
||||
mockAppState.currentScreen = "main";
|
||||
|
||||
const { lastFrame } = render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(lastFrame()).toContain("Safe mode");
|
||||
expect(lastFrame()).toContain("Limited functionality available");
|
||||
});
|
||||
|
||||
test("should log critical errors for debugging", async () => {
|
||||
const criticalError = new Error("Memory allocation failed");
|
||||
criticalError.code = "ENOMEM";
|
||||
|
||||
mockServices.fetchAllTags.mockRejectedValue(criticalError);
|
||||
mockAppState.currentScreen = "tagAnalysis";
|
||||
|
||||
// Mock console.error to capture error logging
|
||||
const consoleSpy = jest
|
||||
.spyOn(console, "error")
|
||||
.mockImplementation(() => {});
|
||||
|
||||
render(React.createElement(TuiApplication));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("Critical error"),
|
||||
expect.any(Error)
|
||||
);
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,642 +0,0 @@
|
||||
const ScheduleService = require("../../../src/tui/services/ScheduleService.js");
|
||||
const LogService = require("../../../src/tui/services/LogService.js");
|
||||
const TagAnalysisService = require("../../../src/tui/services/TagAnalysisService.js");
|
||||
|
||||
// Integration tests focusing on service workflows and data flow
|
||||
describe("TUI Screen Workflows Integration Tests", () => {
|
||||
let scheduleService;
|
||||
let logService;
|
||||
let tagAnalysisService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create fresh service instances for each test
|
||||
scheduleService = new ScheduleService();
|
||||
logService = new LogService();
|
||||
tagAnalysisService = new TagAnalysisService();
|
||||
|
||||
// Mock file system operations
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockImplementation(() => Promise.resolve("[]"));
|
||||
jest
|
||||
.spyOn(require("fs").promises, "writeFile")
|
||||
.mockImplementation(() => Promise.resolve());
|
||||
jest
|
||||
.spyOn(require("fs").promises, "access")
|
||||
.mockImplementation(() => Promise.resolve());
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readdir")
|
||||
.mockImplementation(() => Promise.resolve([]));
|
||||
jest.spyOn(require("fs").promises, "stat").mockImplementation(() =>
|
||||
Promise.resolve({
|
||||
size: 1024,
|
||||
mtime: new Date(),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("Scheduling Screen Workflow", () => {
|
||||
test("should create, read, update, and delete schedules", async () => {
|
||||
// Test schedule creation
|
||||
const futureDate = new Date(
|
||||
Date.now() + 24 * 60 * 60 * 1000
|
||||
).toISOString(); // 24 hours from now
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: futureDate,
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: {
|
||||
targetTag: "test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
priceAdjustmentPercentage: 10,
|
||||
},
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
expect(createdSchedule).toHaveProperty("id");
|
||||
expect(createdSchedule.operationType).toBe("update");
|
||||
|
||||
// Test schedule reading
|
||||
const allSchedules = await scheduleService.getAllSchedules();
|
||||
expect(Array.isArray(allSchedules)).toBe(true);
|
||||
|
||||
// Test schedule updating
|
||||
const updatedSchedule = await scheduleService.updateSchedule(
|
||||
createdSchedule.id,
|
||||
{
|
||||
...createdSchedule,
|
||||
operationType: "rollback",
|
||||
}
|
||||
);
|
||||
expect(updatedSchedule.operationType).toBe("rollback");
|
||||
|
||||
// Test schedule deletion
|
||||
const deleteResult = await scheduleService.deleteSchedule(
|
||||
createdSchedule.id
|
||||
);
|
||||
expect(deleteResult).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate schedule data correctly", async () => {
|
||||
const invalidSchedule = {
|
||||
operationType: "invalid",
|
||||
scheduledTime: "invalid-date",
|
||||
recurrence: "invalid",
|
||||
};
|
||||
|
||||
await expect(
|
||||
scheduleService.addSchedule(invalidSchedule)
|
||||
).rejects.toThrow("Invalid schedule data");
|
||||
});
|
||||
|
||||
test("should handle concurrent schedule operations", async () => {
|
||||
const schedule1 = {
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-01-15T10:00:00Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
const schedule2 = {
|
||||
operationType: "rollback",
|
||||
scheduledTime: "2024-01-16T10:00:00Z",
|
||||
recurrence: "daily",
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
// Create schedules concurrently
|
||||
const [created1, created2] = await Promise.all([
|
||||
scheduleService.addSchedule(schedule1),
|
||||
scheduleService.addSchedule(schedule2),
|
||||
]);
|
||||
|
||||
expect(created1.id).not.toBe(created2.id);
|
||||
expect(created1.operationType).toBe("update");
|
||||
expect(created2.operationType).toBe("rollback");
|
||||
});
|
||||
});
|
||||
|
||||
describe("View Logs Screen Workflow", () => {
|
||||
test("should discover and read log files", async () => {
|
||||
// Mock log files
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readdir")
|
||||
.mockResolvedValue([
|
||||
"Progress-2024-01-15.md",
|
||||
"Progress-2024-01-14.md",
|
||||
"other-file.txt",
|
||||
]);
|
||||
|
||||
const logFiles = await logService.getLogFiles();
|
||||
expect(logFiles).toHaveLength(2); // Should filter out non-log files
|
||||
expect(logFiles[0].filename).toBe("Progress-2024-01-15.md");
|
||||
});
|
||||
|
||||
test("should parse log content correctly", async () => {
|
||||
const mockLogContent = `# Operation Log - 2024-01-15
|
||||
|
||||
## Operation Start
|
||||
- Target Tag: test-tag
|
||||
- Operation: update
|
||||
- Timestamp: 2024-01-15T10:00:00Z
|
||||
|
||||
## Product Updates
|
||||
- Product 1: Updated price from $10.00 to $11.00
|
||||
- Product 2: Updated price from $20.00 to $22.00
|
||||
|
||||
## Operation Complete
|
||||
- Total products updated: 2
|
||||
- Duration: 30 seconds`;
|
||||
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockResolvedValue(mockLogContent);
|
||||
|
||||
const logContent = await logService.readLogFile("Progress-2024-01-15.md");
|
||||
const parsedLogs = logService.parseLogContent(logContent);
|
||||
|
||||
expect(parsedLogs).toHaveLength(4); // Start, 2 updates, complete
|
||||
expect(parsedLogs[0].type).toBe("operation_start");
|
||||
expect(parsedLogs[1].type).toBe("product_update");
|
||||
expect(parsedLogs[3].type).toBe("completion");
|
||||
});
|
||||
|
||||
test("should filter logs by criteria", async () => {
|
||||
const mockLogs = [
|
||||
{
|
||||
timestamp: "2024-01-15T10:00:00Z",
|
||||
type: "operation_start",
|
||||
operationType: "update",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:01:00Z",
|
||||
type: "product_update",
|
||||
operationType: "update",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:02:00Z",
|
||||
type: "operation_start",
|
||||
operationType: "rollback",
|
||||
},
|
||||
{
|
||||
timestamp: "2024-01-15T10:03:00Z",
|
||||
type: "error",
|
||||
operationType: "update",
|
||||
},
|
||||
];
|
||||
|
||||
const filteredLogs = logService.filterLogs(mockLogs, {
|
||||
operationType: "update",
|
||||
status: "all",
|
||||
dateRange: "all",
|
||||
});
|
||||
|
||||
expect(filteredLogs).toHaveLength(3);
|
||||
expect(filteredLogs.every((log) => log.operationType === "update")).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
test("should paginate large log datasets", async () => {
|
||||
const largeLogs = Array.from({ length: 100 }, (_, i) => ({
|
||||
timestamp: `2024-01-15T10:${i.toString().padStart(2, "0")}:00Z`,
|
||||
type: "product_update",
|
||||
message: `Log entry ${i + 1}`,
|
||||
}));
|
||||
|
||||
const page1 = logService.paginateLogs(largeLogs, 0, 20);
|
||||
const page2 = logService.paginateLogs(largeLogs, 1, 20);
|
||||
|
||||
expect(page1.logs).toHaveLength(20);
|
||||
expect(page2.logs).toHaveLength(20);
|
||||
expect(page1.totalPages).toBe(5);
|
||||
expect(page1.logs[0].message).toBe("Log entry 1");
|
||||
expect(page2.logs[0].message).toBe("Log entry 21");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Tag Analysis Screen Workflow", () => {
|
||||
test("should fetch and analyze tags", async () => {
|
||||
// Mock Shopify service
|
||||
const mockShopifyService = {
|
||||
fetchAllProducts: jest.fn().mockResolvedValue([
|
||||
{
|
||||
id: "1",
|
||||
title: "Product 1",
|
||||
tags: ["summer-sale", "clothing"],
|
||||
variants: [
|
||||
{ id: "v1", price: "50.00", title: "Small" },
|
||||
{ id: "v2", price: "55.00", title: "Medium" },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "2",
|
||||
title: "Product 2",
|
||||
tags: ["summer-sale", "accessories"],
|
||||
variants: [{ id: "v3", price: "25.00", title: "One Size" }],
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
// Inject mock service
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
|
||||
const tags = await tagAnalysisService.fetchAllTags();
|
||||
expect(tags).toHaveLength(3); // summer-sale, clothing, accessories
|
||||
|
||||
const summerSaleTag = tags.find((tag) => tag.tag === "summer-sale");
|
||||
expect(summerSaleTag.productCount).toBe(2);
|
||||
expect(summerSaleTag.variantCount).toBe(3);
|
||||
expect(summerSaleTag.totalValue).toBe(130.0);
|
||||
});
|
||||
|
||||
test("should get detailed tag information", async () => {
|
||||
const mockShopifyService = {
|
||||
fetchProductsByTag: jest.fn().mockResolvedValue([
|
||||
{
|
||||
id: "1",
|
||||
title: "Summer Dress",
|
||||
variants: [
|
||||
{ id: "v1", price: "75.00", title: "Small" },
|
||||
{ id: "v2", price: "75.00", title: "Medium" },
|
||||
],
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
|
||||
const tagDetails = await tagAnalysisService.getTagDetails("summer-sale");
|
||||
expect(tagDetails.tag).toBe("summer-sale");
|
||||
expect(tagDetails.products).toHaveLength(1);
|
||||
expect(tagDetails.statistics.totalValue).toBe(150.0);
|
||||
});
|
||||
|
||||
test("should calculate tag statistics correctly", async () => {
|
||||
const mockProducts = [
|
||||
{
|
||||
id: "1",
|
||||
title: "Product 1",
|
||||
variants: [
|
||||
{ id: "v1", price: "100.00" },
|
||||
{ id: "v2", price: "150.00" },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "2",
|
||||
title: "Product 2",
|
||||
variants: [{ id: "v3", price: "50.00" }],
|
||||
},
|
||||
];
|
||||
|
||||
const statistics =
|
||||
tagAnalysisService.calculateTagStatistics(mockProducts);
|
||||
expect(statistics.productCount).toBe(2);
|
||||
expect(statistics.variantCount).toBe(3);
|
||||
expect(statistics.totalValue).toBe(300.0);
|
||||
expect(statistics.averagePrice).toBe(100.0);
|
||||
expect(statistics.priceRange.min).toBe(50.0);
|
||||
expect(statistics.priceRange.max).toBe(150.0);
|
||||
});
|
||||
|
||||
test("should search tags by query", async () => {
|
||||
const mockTags = [
|
||||
{ tag: "summer-sale", productCount: 10 },
|
||||
{ tag: "winter-collection", productCount: 8 },
|
||||
{ tag: "spring-new", productCount: 5 },
|
||||
{ tag: "summer-dress", productCount: 3 },
|
||||
];
|
||||
|
||||
const searchResults = tagAnalysisService.searchTags(mockTags, "summer");
|
||||
expect(searchResults).toHaveLength(2);
|
||||
expect(searchResults.every((tag) => tag.tag.includes("summer"))).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Cross-Screen Data Integration", () => {
|
||||
test("should create schedule with tag from analysis", async () => {
|
||||
// Simulate tag analysis workflow
|
||||
const mockShopifyService = {
|
||||
fetchAllProducts: jest.fn().mockResolvedValue([
|
||||
{
|
||||
id: "1",
|
||||
title: "Product 1",
|
||||
tags: ["selected-tag"],
|
||||
variants: [{ id: "v1", price: "50.00" }],
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
const tags = await tagAnalysisService.fetchAllTags();
|
||||
const selectedTag = tags[0];
|
||||
|
||||
// Create schedule using selected tag
|
||||
const schedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-01-15T10:00:00Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: {
|
||||
targetTag: selectedTag.tag,
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
priceAdjustmentPercentage: 10,
|
||||
},
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(schedule);
|
||||
expect(createdSchedule.config.targetTag).toBe("selected-tag");
|
||||
});
|
||||
|
||||
test("should log scheduled operations for view logs screen", async () => {
|
||||
// Create a schedule
|
||||
const schedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-01-15T10:00:00Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: {
|
||||
targetTag: "test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
},
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(schedule);
|
||||
|
||||
// Simulate schedule execution logging
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
type: "scheduled_operation",
|
||||
scheduleId: createdSchedule.id,
|
||||
operationType: schedule.operationType,
|
||||
targetTag: schedule.config.targetTag,
|
||||
message: "Scheduled operation executed successfully",
|
||||
};
|
||||
|
||||
// Mock log content that would be created by scheduled operation
|
||||
const mockLogContent = `# Scheduled Operation Log - ${
|
||||
new Date().toISOString().split("T")[0]
|
||||
}
|
||||
|
||||
## Schedule ID: ${createdSchedule.id}
|
||||
## Operation: ${schedule.operationType}
|
||||
## Target Tag: ${schedule.config.targetTag}
|
||||
## Execution Time: ${logEntry.timestamp}
|
||||
|
||||
## Results
|
||||
- Operation completed successfully
|
||||
- Products processed: 5
|
||||
- Duration: 45 seconds`;
|
||||
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockResolvedValue(mockLogContent);
|
||||
|
||||
const logContent = await logService.readLogFile("scheduled-operation.md");
|
||||
expect(logContent).toContain(createdSchedule.id);
|
||||
expect(logContent).toContain(schedule.config.targetTag);
|
||||
});
|
||||
|
||||
test("should maintain configuration consistency across screens", async () => {
|
||||
const testConfig = {
|
||||
targetTag: "integration-test-tag",
|
||||
shopDomain: "test-shop.myshopify.com",
|
||||
accessToken: "test-token",
|
||||
priceAdjustmentPercentage: 15,
|
||||
operationMode: "update",
|
||||
};
|
||||
|
||||
// Test that schedule uses current configuration
|
||||
const schedule = {
|
||||
operationType: testConfig.operationMode,
|
||||
scheduledTime: "2024-01-15T10:00:00Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: testConfig,
|
||||
};
|
||||
|
||||
const createdSchedule = await scheduleService.addSchedule(schedule);
|
||||
expect(createdSchedule.config.targetTag).toBe(testConfig.targetTag);
|
||||
expect(createdSchedule.config.priceAdjustmentPercentage).toBe(
|
||||
testConfig.priceAdjustmentPercentage
|
||||
);
|
||||
|
||||
// Test that tag analysis can update configuration
|
||||
const mockShopifyService = {
|
||||
fetchAllProducts: jest.fn().mockResolvedValue([
|
||||
{
|
||||
id: "1",
|
||||
title: "Product 1",
|
||||
tags: ["new-target-tag"],
|
||||
variants: [{ id: "v1", price: "50.00" }],
|
||||
},
|
||||
]),
|
||||
};
|
||||
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
const tags = await tagAnalysisService.fetchAllTags();
|
||||
const newTargetTag = tags[0];
|
||||
|
||||
// Simulate configuration update from tag analysis
|
||||
const updatedConfig = {
|
||||
...testConfig,
|
||||
targetTag: newTargetTag.tag,
|
||||
};
|
||||
|
||||
// Verify new schedules use updated configuration
|
||||
const newSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-01-16T10:00:00Z",
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
config: updatedConfig,
|
||||
};
|
||||
|
||||
const newCreatedSchedule = await scheduleService.addSchedule(newSchedule);
|
||||
expect(newCreatedSchedule.config.targetTag).toBe("new-target-tag");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handling and Recovery", () => {
|
||||
test("should handle service failures gracefully", async () => {
|
||||
// Test schedule service error handling
|
||||
jest
|
||||
.spyOn(require("fs").promises, "writeFile")
|
||||
.mockRejectedValue(new Error("Disk full"));
|
||||
|
||||
await expect(
|
||||
scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: "2024-01-15T10:00:00Z",
|
||||
recurrence: "once",
|
||||
})
|
||||
).rejects.toThrow("Disk full");
|
||||
|
||||
// Test log service error handling
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockRejectedValue(new Error("File not found"));
|
||||
|
||||
await expect(logService.readLogFile("nonexistent.md")).rejects.toThrow(
|
||||
"File not found"
|
||||
);
|
||||
|
||||
// Test tag analysis service error handling
|
||||
const mockShopifyService = {
|
||||
fetchAllProducts: jest
|
||||
.fn()
|
||||
.mockRejectedValue(new Error("API rate limited")),
|
||||
};
|
||||
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
await expect(tagAnalysisService.fetchAllTags()).rejects.toThrow(
|
||||
"API rate limited"
|
||||
);
|
||||
});
|
||||
|
||||
test("should provide fallback data when services are unavailable", async () => {
|
||||
// Test schedule service fallback
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockRejectedValue(new Error("ENOENT"));
|
||||
|
||||
const schedules = await scheduleService.getAllSchedules();
|
||||
expect(Array.isArray(schedules)).toBe(true);
|
||||
expect(schedules).toHaveLength(0); // Should return empty array as fallback
|
||||
|
||||
// Test log service fallback
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readdir")
|
||||
.mockRejectedValue(new Error("Permission denied"));
|
||||
|
||||
const logFiles = await logService.getLogFiles();
|
||||
expect(Array.isArray(logFiles)).toBe(true);
|
||||
expect(logFiles).toHaveLength(0); // Should return empty array as fallback
|
||||
});
|
||||
|
||||
test("should validate data integrity across operations", async () => {
|
||||
// Test invalid schedule data
|
||||
const invalidSchedule = {
|
||||
operationType: "invalid-operation",
|
||||
scheduledTime: "not-a-date",
|
||||
recurrence: "invalid-recurrence",
|
||||
};
|
||||
|
||||
await expect(
|
||||
scheduleService.addSchedule(invalidSchedule)
|
||||
).rejects.toThrow(/Invalid schedule data/);
|
||||
|
||||
// Test corrupted log parsing
|
||||
const corruptedLogContent = "This is not valid log content";
|
||||
const parsedLogs = logService.parseLogContent(corruptedLogContent);
|
||||
expect(Array.isArray(parsedLogs)).toBe(true);
|
||||
expect(parsedLogs).toHaveLength(0); // Should handle gracefully
|
||||
|
||||
// Test invalid tag data
|
||||
const invalidProducts = null;
|
||||
const statistics =
|
||||
tagAnalysisService.calculateTagStatistics(invalidProducts);
|
||||
expect(statistics.productCount).toBe(0);
|
||||
expect(statistics.variantCount).toBe(0);
|
||||
expect(statistics.totalValue).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Performance and Scalability", () => {
|
||||
test("should handle large datasets efficiently", async () => {
|
||||
// Test large schedule list
|
||||
const largeScheduleList = Array.from({ length: 1000 }, (_, i) => ({
|
||||
id: `schedule-${i}`,
|
||||
operationType: i % 2 === 0 ? "update" : "rollback",
|
||||
scheduledTime: new Date(Date.now() + i * 3600000).toISOString(),
|
||||
recurrence: "once",
|
||||
enabled: true,
|
||||
}));
|
||||
|
||||
jest
|
||||
.spyOn(require("fs").promises, "readFile")
|
||||
.mockResolvedValue(JSON.stringify(largeScheduleList));
|
||||
|
||||
const startTime = Date.now();
|
||||
const schedules = await scheduleService.getAllSchedules();
|
||||
const endTime = Date.now();
|
||||
|
||||
expect(schedules).toHaveLength(1000);
|
||||
expect(endTime - startTime).toBeLessThan(1000); // Should complete within 1 second
|
||||
|
||||
// Test large log file parsing
|
||||
const largeLogContent = Array.from(
|
||||
{ length: 10000 },
|
||||
(_, i) =>
|
||||
`## Log Entry ${i + 1}\n- Timestamp: 2024-01-15T10:${(i % 60)
|
||||
.toString()
|
||||
.padStart(2, "0")}:00Z\n- Message: Product ${i + 1} updated`
|
||||
).join("\n\n");
|
||||
|
||||
const parseStartTime = Date.now();
|
||||
const parsedLogs = logService.parseLogContent(largeLogContent);
|
||||
const parseEndTime = Date.now();
|
||||
|
||||
expect(parsedLogs.length).toBeGreaterThan(0);
|
||||
expect(parseEndTime - parseStartTime).toBeLessThan(2000); // Should complete within 2 seconds
|
||||
|
||||
// Test large tag dataset
|
||||
const largeProductList = Array.from({ length: 5000 }, (_, i) => ({
|
||||
id: `product-${i}`,
|
||||
title: `Product ${i}`,
|
||||
tags: [`tag-${i % 100}`, `category-${i % 20}`],
|
||||
variants: [
|
||||
{
|
||||
id: `variant-${i}-1`,
|
||||
price: (Math.random() * 100 + 10).toFixed(2),
|
||||
},
|
||||
{
|
||||
id: `variant-${i}-2`,
|
||||
price: (Math.random() * 100 + 10).toFixed(2),
|
||||
},
|
||||
],
|
||||
}));
|
||||
|
||||
const mockShopifyService = {
|
||||
fetchAllProducts: jest.fn().mockResolvedValue(largeProductList),
|
||||
};
|
||||
|
||||
tagAnalysisService.shopifyService = mockShopifyService;
|
||||
|
||||
const tagStartTime = Date.now();
|
||||
const tags = await tagAnalysisService.fetchAllTags();
|
||||
const tagEndTime = Date.now();
|
||||
|
||||
expect(tags.length).toBeGreaterThan(0);
|
||||
expect(tagEndTime - tagStartTime).toBeLessThan(3000); // Should complete within 3 seconds
|
||||
});
|
||||
|
||||
test("should manage memory efficiently with large datasets", async () => {
|
||||
// Test memory usage doesn't grow excessively
|
||||
const initialMemory = process.memoryUsage().heapUsed;
|
||||
|
||||
// Process large dataset multiple times
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const largeProducts = Array.from({ length: 1000 }, (_, j) => ({
|
||||
id: `product-${j}`,
|
||||
variants: [{ id: `variant-${j}`, price: "50.00" }],
|
||||
}));
|
||||
|
||||
tagAnalysisService.calculateTagStatistics(largeProducts);
|
||||
}
|
||||
|
||||
const finalMemory = process.memoryUsage().heapUsed;
|
||||
const memoryIncrease = finalMemory - initialMemory;
|
||||
|
||||
// Memory increase should be reasonable (less than 50MB)
|
||||
expect(memoryIncrease).toBeLessThan(50 * 1024 * 1024);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,259 +0,0 @@
|
||||
const LogService = require("../../../src/tui/services/LogService.js");
|
||||
|
||||
describe("LogService Performance Optimizations", () => {
|
||||
let service;
|
||||
|
||||
beforeEach(() => {
|
||||
service = new LogService("test-progress.md");
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (service) {
|
||||
service.destroy();
|
||||
}
|
||||
});
|
||||
|
||||
describe("efficient pagination", () => {
|
||||
it("should paginate logs efficiently", () => {
|
||||
const logs = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: `log_${i}`,
|
||||
timestamp: new Date(),
|
||||
title: `Log Entry ${i}`,
|
||||
message: `Message ${i}`,
|
||||
level: "INFO",
|
||||
}));
|
||||
|
||||
const result = service.paginateLogs(logs, 2, 10); // Page 2, 10 items per page
|
||||
|
||||
expect(result.entries).toHaveLength(10);
|
||||
expect(result.pagination.currentPage).toBe(2);
|
||||
expect(result.pagination.totalPages).toBe(10);
|
||||
expect(result.pagination.hasNextPage).toBe(true);
|
||||
expect(result.pagination.hasPreviousPage).toBe(true);
|
||||
expect(result.pagination.startIndex).toBe(21); // 1-based index
|
||||
expect(result.pagination.endIndex).toBe(30);
|
||||
});
|
||||
|
||||
it("should handle edge cases in pagination", () => {
|
||||
const logs = Array.from({ length: 5 }, (_, i) => ({
|
||||
id: `log_${i}`,
|
||||
timestamp: new Date(),
|
||||
title: `Log Entry ${i}`,
|
||||
message: `Message ${i}`,
|
||||
level: "INFO",
|
||||
}));
|
||||
|
||||
// Last page
|
||||
const result = service.paginateLogs(logs, 0, 10);
|
||||
|
||||
expect(result.entries).toHaveLength(5);
|
||||
expect(result.pagination.totalPages).toBe(1);
|
||||
expect(result.pagination.hasNextPage).toBe(false);
|
||||
expect(result.pagination.hasPreviousPage).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("streaming for large files", () => {
|
||||
it("should parse log content in streaming mode", async () => {
|
||||
const mockContent = "Test log content";
|
||||
|
||||
const result = await service.parseLogContentStreaming(
|
||||
mockContent,
|
||||
{
|
||||
dateRange: "all",
|
||||
operationType: "all",
|
||||
status: "all",
|
||||
searchTerm: "",
|
||||
},
|
||||
0,
|
||||
10
|
||||
);
|
||||
|
||||
expect(result).toHaveProperty("entries");
|
||||
expect(result).toHaveProperty("totalCount");
|
||||
expect(Array.isArray(result.entries)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("caching optimizations", () => {
|
||||
it("should track cache statistics", () => {
|
||||
const stats = service.getCacheStats();
|
||||
|
||||
expect(stats).toHaveProperty("size");
|
||||
expect(stats).toHaveProperty("keys");
|
||||
expect(typeof stats.size).toBe("number");
|
||||
expect(Array.isArray(stats.keys)).toBe(true);
|
||||
});
|
||||
|
||||
it("should provide memory usage statistics", () => {
|
||||
const stats = service.getMemoryStats();
|
||||
|
||||
expect(stats).toHaveProperty("cacheEntries");
|
||||
expect(stats).toHaveProperty("estimatedSizeBytes");
|
||||
expect(stats).toHaveProperty("estimatedSizeMB");
|
||||
expect(stats).toHaveProperty("maxEntries");
|
||||
expect(stats).toHaveProperty("cacheHitRatio");
|
||||
});
|
||||
});
|
||||
|
||||
describe("memory management", () => {
|
||||
it("should clean up expired cache entries", () => {
|
||||
// Add some cache entries with old timestamps
|
||||
service.cache.set("old_entry", {
|
||||
data: { test: "data" },
|
||||
timestamp: Date.now() - 10 * 60 * 1000, // 10 minutes ago
|
||||
});
|
||||
|
||||
service.cache.set("new_entry", {
|
||||
data: { test: "data" },
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
expect(service.cache.size).toBe(2);
|
||||
|
||||
service.cleanup();
|
||||
|
||||
expect(service.cache.size).toBe(1);
|
||||
expect(service.cache.has("new_entry")).toBe(true);
|
||||
expect(service.cache.has("old_entry")).toBe(false);
|
||||
});
|
||||
|
||||
it("should limit cache size to prevent memory issues", () => {
|
||||
// Fill cache beyond limit
|
||||
for (let i = 0; i < 40; i++) {
|
||||
service.cache.set(`entry_${i}`, {
|
||||
data: { large: "data".repeat(1000) },
|
||||
timestamp: Date.now() - i * 1000, // Different timestamps
|
||||
});
|
||||
}
|
||||
|
||||
expect(service.cache.size).toBeGreaterThan(30);
|
||||
|
||||
service.cleanup();
|
||||
|
||||
expect(service.cache.size).toBeLessThanOrEqual(30);
|
||||
});
|
||||
|
||||
it("should clean up resources on destroy", () => {
|
||||
service.destroy();
|
||||
|
||||
expect(service.cache.size).toBe(0);
|
||||
expect(service.cleanupInterval).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("filtering optimizations", () => {
|
||||
it("should filter logs efficiently", () => {
|
||||
const logs = [
|
||||
{
|
||||
id: "log_1",
|
||||
timestamp: new Date("2024-01-01"),
|
||||
title: "Update Product A",
|
||||
message: "Product updated successfully",
|
||||
level: "SUCCESS",
|
||||
type: "update",
|
||||
details: "Product A details",
|
||||
productTitle: "Product A",
|
||||
},
|
||||
{
|
||||
id: "log_2",
|
||||
timestamp: new Date("2024-01-02"),
|
||||
title: "Error Product B",
|
||||
message: "Product update failed",
|
||||
level: "ERROR",
|
||||
type: "update",
|
||||
details: "Product B error details",
|
||||
productTitle: "Product B",
|
||||
},
|
||||
{
|
||||
id: "log_3",
|
||||
timestamp: new Date("2024-01-03"),
|
||||
title: "Rollback Product C",
|
||||
message: "Product rollback completed",
|
||||
level: "INFO",
|
||||
type: "rollback",
|
||||
details: "Product C rollback details",
|
||||
productTitle: "Product C",
|
||||
},
|
||||
];
|
||||
|
||||
// Filter by operation type
|
||||
const updateLogs = service.filterLogs(logs, { operationType: "update" });
|
||||
expect(updateLogs).toHaveLength(2);
|
||||
|
||||
// Filter by status
|
||||
const errorLogs = service.filterLogs(logs, { status: "error" });
|
||||
expect(errorLogs).toHaveLength(1);
|
||||
expect(errorLogs[0].level).toBe("ERROR");
|
||||
|
||||
// Filter by search term
|
||||
const productALogs = service.filterLogs(logs, {
|
||||
searchTerm: "Product A",
|
||||
});
|
||||
expect(productALogs).toHaveLength(1);
|
||||
expect(productALogs[0].productTitle).toBe("Product A");
|
||||
});
|
||||
|
||||
it("should handle date range filtering", () => {
|
||||
const now = new Date();
|
||||
const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
|
||||
const lastWeek = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
|
||||
|
||||
const logs = [
|
||||
{
|
||||
id: "log_1",
|
||||
timestamp: now,
|
||||
title: "Recent Log",
|
||||
message: "Recent message",
|
||||
level: "INFO",
|
||||
},
|
||||
{
|
||||
id: "log_2",
|
||||
timestamp: yesterday,
|
||||
title: "Yesterday Log",
|
||||
message: "Yesterday message",
|
||||
level: "INFO",
|
||||
},
|
||||
{
|
||||
id: "log_3",
|
||||
timestamp: lastWeek,
|
||||
title: "Old Log",
|
||||
message: "Old message",
|
||||
level: "INFO",
|
||||
},
|
||||
];
|
||||
|
||||
// Filter by today
|
||||
const todayLogs = service.filterLogs(logs, { dateRange: "today" });
|
||||
expect(todayLogs).toHaveLength(1);
|
||||
expect(todayLogs[0].title).toBe("Recent Log");
|
||||
|
||||
// Filter by week
|
||||
const weekLogs = service.filterLogs(logs, { dateRange: "week" });
|
||||
expect(weekLogs.length).toBeGreaterThanOrEqual(2); // Should include recent and yesterday
|
||||
});
|
||||
});
|
||||
|
||||
describe("preloading", () => {
|
||||
it("should preload next page without blocking", async () => {
|
||||
const options = {
|
||||
page: 0,
|
||||
pageSize: 10,
|
||||
dateRange: "all",
|
||||
operationType: "all",
|
||||
status: "all",
|
||||
searchTerm: "",
|
||||
};
|
||||
|
||||
// Mock the getFilteredLogs method to avoid actual file operations
|
||||
service.getFilteredLogs = jest.fn().mockResolvedValue({
|
||||
entries: [],
|
||||
pagination: { hasNextPage: true },
|
||||
});
|
||||
|
||||
// Preload should not throw errors
|
||||
await expect(service.preloadNextPage(options)).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,79 +0,0 @@
|
||||
/**
|
||||
* Basic ScheduleService Tests
|
||||
* Tests for core functionality
|
||||
*/
|
||||
|
||||
const fs = require("fs");
|
||||
const ScheduleService = require("../../../src/tui/services/ScheduleService");
|
||||
|
||||
describe("ScheduleService Basic Tests", () => {
  let scheduleService;
  const testSchedulesFile = "test-schedules-basic.json";

  // Every test gets a fresh service pointed at a throwaway schedules file.
  beforeEach(() => {
    scheduleService = new ScheduleService();
    scheduleService.schedulesFile = testSchedulesFile;
    scheduleService.lockFile = `${testSchedulesFile}.lock`;

    try {
      fs.unlinkSync(testSchedulesFile);
    } catch (error) {
      // No leftover file to remove — nothing to do.
    }
  });

  afterEach(() => {
    try {
      fs.unlinkSync(testSchedulesFile);
    } catch (error) {
      // Already gone — nothing to do.
    }
  });

  test("should validate schedule data", () => {
    const wellFormed = {
      operationType: "update",
      scheduledTime: new Date(Date.now() + 86400000).toISOString(),
      recurrence: "once",
      description: "Test schedule",
    };

    expect(() =>
      scheduleService.validateScheduleData(wellFormed)
    ).not.toThrow();
  });

  test("should reject invalid operation types", () => {
    const badOperation = {
      operationType: "invalid",
      scheduledTime: new Date(Date.now() + 86400000).toISOString(),
      recurrence: "once",
    };

    expect(() =>
      scheduleService.validateScheduleData(badOperation)
    ).toThrow();
  });

  test("should calculate checksum correctly", () => {
    // The same payload must always hash to the same digest.
    const payload = [{ id: "1", name: "test" }];
    const first = scheduleService.calculateChecksum(payload);
    const second = scheduleService.calculateChecksum(payload);

    expect(first).toBe(second);
    expect(typeof first).toBe("string");
    expect(first.length).toBe(32); // hex MD5 digest length
  });

  test("should provide service statistics", () => {
    const stats = scheduleService.getServiceStats();

    expect(stats).toHaveProperty("schedulesLoaded");
    expect(stats).toHaveProperty("schedulesCount");
    expect(stats).toHaveProperty("activeSchedules");
    expect(stats).toHaveProperty("pendingOperations");
    expect(stats).toHaveProperty("memoryUsage");
  });
});
|
||||
@@ -1,374 +0,0 @@
|
||||
/**
|
||||
* Enhanced ScheduleService Tests
|
||||
* Tests for data persistence, state management, and concurrent access
|
||||
* Requirements: 5.1, 5.4, 5.6
|
||||
*/
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const ScheduleService = require("../../../src/tui/services/ScheduleService");
|
||||
|
||||
describe("ScheduleService Enhanced Features", () => {
|
||||
let scheduleService;
|
||||
const testSchedulesFile = "test-schedules.json";
|
||||
const testLockFile = "test-schedules.json.lock";
|
||||
|
||||
beforeEach(() => {
|
||||
// Create service with test file
|
||||
scheduleService = new ScheduleService();
|
||||
scheduleService.schedulesFile = testSchedulesFile;
|
||||
scheduleService.lockFile = testLockFile;
|
||||
|
||||
// Clean up any existing test files
|
||||
[
|
||||
testSchedulesFile,
|
||||
testLockFile,
|
||||
`${testSchedulesFile}.backup`,
|
||||
`${testSchedulesFile}.tmp.${Date.now()}`,
|
||||
].forEach((file) => {
|
||||
try {
|
||||
fs.unlinkSync(file);
|
||||
} catch (error) {
|
||||
// File doesn't exist, which is fine
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Cleanup
|
||||
await scheduleService.cleanup();
|
||||
|
||||
// Remove test files
|
||||
[testSchedulesFile, testLockFile, `${testSchedulesFile}.backup`].forEach(
|
||||
(file) => {
|
||||
try {
|
||||
fs.unlinkSync(file);
|
||||
} catch (error) {
|
||||
// File doesn't exist, which is fine
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe("Data Persistence", () => {
|
||||
test("should save schedules with metadata and checksum", async () => {
|
||||
const testSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(), // Tomorrow
|
||||
recurrence: "once",
|
||||
description: "Test schedule",
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
const savedSchedule = await scheduleService.addSchedule(testSchedule);
|
||||
expect(savedSchedule.id).toBeDefined();
|
||||
|
||||
// Check file structure
|
||||
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
||||
const parsedData = JSON.parse(fileContent);
|
||||
|
||||
expect(parsedData.version).toBe("1.0");
|
||||
expect(parsedData.lastModified).toBeDefined();
|
||||
expect(parsedData.schedules).toHaveLength(1);
|
||||
expect(parsedData.metadata.totalSchedules).toBe(1);
|
||||
expect(parsedData.metadata.checksum).toBeDefined();
|
||||
});
|
||||
|
||||
test("should create backup before saving", async () => {
|
||||
// Create initial schedule
|
||||
const schedule1 = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
description: "First schedule",
|
||||
};
|
||||
|
||||
await scheduleService.addSchedule(schedule1);
|
||||
|
||||
// Add another schedule (should create backup)
|
||||
const schedule2 = {
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 172800000).toISOString(),
|
||||
recurrence: "once",
|
||||
description: "Second schedule",
|
||||
};
|
||||
|
||||
await scheduleService.addSchedule(schedule2);
|
||||
|
||||
// Check that backup exists
|
||||
expect(fs.existsSync(`${testSchedulesFile}.backup`)).toBe(true);
|
||||
});
|
||||
|
||||
test("should verify data integrity with checksum", async () => {
|
||||
const testSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "daily",
|
||||
description: "Integrity test",
|
||||
};
|
||||
|
||||
await scheduleService.addSchedule(testSchedule);
|
||||
|
||||
// Manually corrupt the file
|
||||
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
||||
const parsedData = JSON.parse(fileContent);
|
||||
|
||||
// Change checksum to simulate corruption
|
||||
parsedData.metadata.checksum = "invalid-checksum";
|
||||
fs.writeFileSync(testSchedulesFile, JSON.stringify(parsedData, null, 2));
|
||||
|
||||
// Loading should detect corruption
|
||||
const newService = new ScheduleService();
|
||||
newService.schedulesFile = testSchedulesFile;
|
||||
|
||||
await expect(newService.loadSchedules()).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("File Locking", () => {
|
||||
test("should acquire and release file locks", async () => {
|
||||
await scheduleService.acquireFileLock();
|
||||
expect(fs.existsSync(testLockFile)).toBe(true);
|
||||
|
||||
await scheduleService.releaseFileLock();
|
||||
expect(fs.existsSync(testLockFile)).toBe(false);
|
||||
});
|
||||
|
||||
test("should handle concurrent access attempts", async () => {
|
||||
// Simulate concurrent access
|
||||
const service1 = new ScheduleService();
|
||||
const service2 = new ScheduleService();
|
||||
service1.schedulesFile = testSchedulesFile;
|
||||
service1.lockFile = testLockFile;
|
||||
service2.schedulesFile = testSchedulesFile;
|
||||
service2.lockFile = testLockFile;
|
||||
|
||||
// First service acquires lock
|
||||
await service1.acquireFileLock();
|
||||
|
||||
// Second service should fail to acquire lock
|
||||
await expect(service2.acquireFileLock()).rejects.toThrow(
|
||||
/Failed to acquire file lock/
|
||||
);
|
||||
|
||||
// Release first lock
|
||||
await service1.releaseFileLock();
|
||||
|
||||
// Now second service should be able to acquire lock
|
||||
await expect(service2.acquireFileLock()).resolves.not.toThrow();
|
||||
await service2.releaseFileLock();
|
||||
});
|
||||
|
||||
test("should handle stale lock files", async () => {
|
||||
// Create a stale lock file
|
||||
const staleLockData = {
|
||||
pid: 99999,
|
||||
timestamp: new Date(Date.now() - 10000).toISOString(), // 10 seconds ago
|
||||
operation: "test",
|
||||
};
|
||||
fs.writeFileSync(testLockFile, JSON.stringify(staleLockData));
|
||||
|
||||
// Should be able to acquire lock by removing stale lock
|
||||
await expect(scheduleService.acquireFileLock()).resolves.not.toThrow();
|
||||
await scheduleService.releaseFileLock();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Data Validation", () => {
|
||||
test("should validate schedule data comprehensively", () => {
|
||||
const validSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "weekly",
|
||||
description: "Valid schedule",
|
||||
enabled: true,
|
||||
};
|
||||
|
||||
expect(() =>
|
||||
scheduleService.validateScheduleData(validSchedule)
|
||||
).not.toThrow();
|
||||
});
|
||||
|
||||
test("should reject invalid operation types", () => {
|
||||
const invalidSchedule = {
|
||||
operationType: "invalid",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
};
|
||||
|
||||
expect(() =>
|
||||
scheduleService.validateScheduleData(invalidSchedule)
|
||||
).toThrow(/must be one of: update, rollback/);
|
||||
});
|
||||
|
||||
test("should reject past dates", () => {
|
||||
const pastSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() - 86400000).toISOString(), // Yesterday
|
||||
recurrence: "once",
|
||||
};
|
||||
|
||||
expect(() => scheduleService.validateScheduleData(pastSchedule)).toThrow(
|
||||
/must be in the future/
|
||||
);
|
||||
});
|
||||
|
||||
test("should validate description length", () => {
|
||||
const longDescription = "x".repeat(501); // Exceeds 500 char limit
|
||||
const invalidSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
description: longDescription,
|
||||
};
|
||||
|
||||
expect(() =>
|
||||
scheduleService.validateScheduleData(invalidSchedule)
|
||||
).toThrow(/must not exceed 500 characters/);
|
||||
});
|
||||
|
||||
test("should prevent rollback operations from being recurring", () => {
|
||||
const invalidSchedule = {
|
||||
operationType: "rollback",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "daily", // Rollbacks should only be 'once'
|
||||
};
|
||||
|
||||
expect(() =>
|
||||
scheduleService.validateScheduleData(invalidSchedule)
|
||||
).toThrow(/Rollback operations can only be scheduled once/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Recovery", () => {
|
||||
test("should recover from corrupted files using backup", async () => {
|
||||
// Create valid schedule first
|
||||
const validSchedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
description: "Recovery test",
|
||||
};
|
||||
|
||||
await scheduleService.addSchedule(validSchedule);
|
||||
|
||||
// Corrupt the main file
|
||||
fs.writeFileSync(testSchedulesFile, "invalid json content");
|
||||
|
||||
// Recovery should work
|
||||
const recovered = await scheduleService.recoverFromCorruption();
|
||||
expect(Array.isArray(recovered)).toBe(true);
|
||||
});
|
||||
|
||||
test("should create empty file when no recovery possible", async () => {
|
||||
// Create corrupted file with no backup
|
||||
fs.writeFileSync(testSchedulesFile, "completely invalid");
|
||||
|
||||
const recovered = await scheduleService.recoverFromCorruption();
|
||||
expect(recovered).toEqual([]);
|
||||
|
||||
// Should create new empty file
|
||||
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
||||
const parsedData = JSON.parse(fileContent);
|
||||
expect(parsedData.schedules).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("State Management", () => {
|
||||
test("should cleanup resources properly", async () => {
|
||||
// Add some schedules and create locks
|
||||
await scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
});
|
||||
|
||||
await scheduleService.acquireFileLock();
|
||||
|
||||
// Cleanup should clear everything
|
||||
scheduleService.cleanup();
|
||||
|
||||
expect(scheduleService.persistenceQueue).toEqual([]);
|
||||
expect(scheduleService.isProcessingQueue).toBe(false);
|
||||
expect(scheduleService.isLoaded).toBe(false);
|
||||
});
|
||||
|
||||
test("should provide system state validation", async () => {
|
||||
const report = await scheduleService.validateSystemState();
|
||||
|
||||
expect(report).toHaveProperty("fileExists");
|
||||
expect(report).toHaveProperty("fileReadable");
|
||||
expect(report).toHaveProperty("fileWritable");
|
||||
expect(report).toHaveProperty("dataValid");
|
||||
expect(report).toHaveProperty("issues");
|
||||
expect(report).toHaveProperty("recommendations");
|
||||
});
|
||||
|
||||
test("should provide service statistics", () => {
|
||||
const stats = scheduleService.getServiceStats();
|
||||
|
||||
expect(stats).toHaveProperty("schedulesLoaded");
|
||||
expect(stats).toHaveProperty("schedulesCount");
|
||||
expect(stats).toHaveProperty("activeSchedules");
|
||||
expect(stats).toHaveProperty("pendingOperations");
|
||||
expect(stats).toHaveProperty("memoryUsage");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Atomic Operations", () => {
|
||||
test("should queue multiple save operations", async () => {
|
||||
const promises = [];
|
||||
|
||||
// Queue multiple operations simultaneously
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const schedule = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(
|
||||
Date.now() + 86400000 + i * 1000
|
||||
).toISOString(),
|
||||
recurrence: "once",
|
||||
description: `Schedule ${i}`,
|
||||
};
|
||||
promises.push(scheduleService.addSchedule(schedule));
|
||||
}
|
||||
|
||||
// All should complete successfully
|
||||
const results = await Promise.all(promises);
|
||||
expect(results).toHaveLength(5);
|
||||
|
||||
// All should have unique IDs
|
||||
const ids = results.map((r) => r.id);
|
||||
const uniqueIds = new Set(ids);
|
||||
expect(uniqueIds.size).toBe(5);
|
||||
});
|
||||
|
||||
test("should maintain data consistency during concurrent operations", async () => {
|
||||
const operations = [];
|
||||
|
||||
// Create multiple concurrent add/update/delete operations
|
||||
for (let i = 0; i < 3; i++) {
|
||||
operations.push(
|
||||
scheduleService.addSchedule({
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(
|
||||
Date.now() + 86400000 + i * 1000
|
||||
).toISOString(),
|
||||
recurrence: "once",
|
||||
description: `Concurrent ${i}`,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const schedules = await Promise.all(operations);
|
||||
|
||||
// Verify all schedules were saved
|
||||
const allSchedules = await scheduleService.getAllSchedules();
|
||||
expect(allSchedules).toHaveLength(3);
|
||||
|
||||
// Verify data integrity
|
||||
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
||||
const parsedData = JSON.parse(fileContent);
|
||||
expect(parsedData.metadata.totalSchedules).toBe(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,256 +0,0 @@
|
||||
const TagAnalysisService = require("../../../src/tui/services/TagAnalysisService.js");
|
||||
|
||||
// Mock dependencies
|
||||
// Shared mock collaborators injected into TagAnalysisService.
const mockShopifyService = {
  // Intentionally empty: these tests stub shopify behavior per test case.
};

const mockProductService = {
  debugFetchAllProductTags: jest.fn(),
  fetchProductsByTag: jest.fn(),
};
|
||||
|
||||
describe("TagAnalysisService Performance Optimizations", () => {
|
||||
let service;
|
||||
|
||||
beforeEach(() => {
|
||||
service = new TagAnalysisService(mockShopifyService, mockProductService);
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (service) {
|
||||
service.destroy();
|
||||
}
|
||||
});
|
||||
|
||||
describe("lazy loading", () => {
|
||||
it("should support paginated tag fetching", async () => {
|
||||
const mockProducts = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: `product_${i}`,
|
||||
title: `Product ${i}`,
|
||||
tags: [`tag_${i % 10}`], // 10 different tags
|
||||
variants: [
|
||||
{
|
||||
id: `variant_${i}`,
|
||||
price: (i + 1) * 10,
|
||||
title: `Variant ${i}`,
|
||||
},
|
||||
],
|
||||
}));
|
||||
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
const result = await service.fetchAllTags(100, {
|
||||
page: 0,
|
||||
pageSize: 5,
|
||||
enableLazyLoading: true,
|
||||
sortBy: "productCount",
|
||||
sortOrder: "desc",
|
||||
});
|
||||
|
||||
expect(result.tags).toHaveLength(5); // Should return only 5 tags due to pagination
|
||||
expect(result.metadata.pagination).toBeDefined();
|
||||
expect(result.metadata.pagination.page).toBe(0);
|
||||
expect(result.metadata.pagination.pageSize).toBe(5);
|
||||
expect(result.metadata.pagination.hasMore).toBe(true);
|
||||
});
|
||||
|
||||
it("should fetch tags lazily with filtering", async () => {
|
||||
const mockTags = Array.from({ length: 50 }, (_, i) => ({
|
||||
tag: `tag_${i}`,
|
||||
productCount: i + 1,
|
||||
percentage: ((i + 1) / 50) * 100,
|
||||
variantCount: (i + 1) * 2,
|
||||
totalValue: (i + 1) * 100,
|
||||
averagePrice: 50 + i,
|
||||
priceRange: { min: 10, max: 100 },
|
||||
}));
|
||||
|
||||
// Mock the full dataset in cache
|
||||
service.cache.set("all_tags_full_dataset", {
|
||||
data: mockTags,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
const result = await service.fetchTagsLazy({
|
||||
page: 0,
|
||||
pageSize: 10,
|
||||
searchQuery: "tag_1",
|
||||
minProductCount: 5,
|
||||
sortBy: "productCount",
|
||||
sortOrder: "desc",
|
||||
});
|
||||
|
||||
expect(result.tags.length).toBeLessThanOrEqual(10);
|
||||
expect(result.metadata.totalItems).toBeGreaterThan(0);
|
||||
expect(result.metadata.hasMore).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("caching optimizations", () => {
|
||||
it("should cache tag analysis results", async () => {
|
||||
const mockProducts = [
|
||||
{
|
||||
id: "product_1",
|
||||
title: "Product 1",
|
||||
tags: ["tag1", "tag2"],
|
||||
variants: [{ id: "variant_1", price: "10.00" }],
|
||||
},
|
||||
];
|
||||
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
// First call
|
||||
const result1 = await service.fetchAllTags(10);
|
||||
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
|
||||
1
|
||||
);
|
||||
|
||||
// Second call should use cache
|
||||
const result2 = await service.fetchAllTags(10);
|
||||
expect(mockProductService.debugFetchAllProductTags).toHaveBeenCalledTimes(
|
||||
1
|
||||
); // No additional call
|
||||
expect(result1).toEqual(result2);
|
||||
});
|
||||
|
||||
it("should track cache hit ratio", async () => {
|
||||
const mockProducts = [
|
||||
{
|
||||
id: "product_1",
|
||||
title: "Product 1",
|
||||
tags: ["tag1"],
|
||||
variants: [{ id: "variant_1", price: "10.00" }],
|
||||
},
|
||||
];
|
||||
|
||||
mockProductService.debugFetchAllProductTags.mockResolvedValue(
|
||||
mockProducts
|
||||
);
|
||||
|
||||
// Make multiple calls
|
||||
await service.fetchAllTags(10);
|
||||
await service.fetchAllTags(10); // Cache hit
|
||||
await service.fetchAllTags(10); // Cache hit
|
||||
|
||||
const memoryStats = service.getMemoryStats();
|
||||
expect(memoryStats.cacheHitRatio).toBeGreaterThan(0);
|
||||
expect(memoryStats.cacheEntries).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should clean up expired cache entries", async () => {
|
||||
// Add some cache entries with old timestamps
|
||||
service.cache.set("old_entry", {
|
||||
data: { test: "data" },
|
||||
timestamp: Date.now() - 10 * 60 * 1000, // 10 minutes ago
|
||||
});
|
||||
|
||||
service.cache.set("new_entry", {
|
||||
data: { test: "data" },
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
expect(service.cache.size).toBe(2);
|
||||
|
||||
service.cleanup();
|
||||
|
||||
expect(service.cache.size).toBe(1);
|
||||
expect(service.cache.has("new_entry")).toBe(true);
|
||||
expect(service.cache.has("old_entry")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("sorting optimizations", () => {
|
||||
it("should sort tags by different criteria", () => {
|
||||
const tags = [
|
||||
{ tag: "c", productCount: 10, averagePrice: 50, totalValue: 500 },
|
||||
{ tag: "a", productCount: 20, averagePrice: 30, totalValue: 600 },
|
||||
{ tag: "b", productCount: 15, averagePrice: 40, totalValue: 400 },
|
||||
];
|
||||
|
||||
// Sort by product count (desc)
|
||||
service.sortTags(tags, "productCount", "desc");
|
||||
expect(tags[0].tag).toBe("a"); // 20 products
|
||||
|
||||
// Sort by tag name (asc)
|
||||
service.sortTags(tags, "tag", "asc");
|
||||
expect(tags[0].tag).toBe("a"); // alphabetically first
|
||||
|
||||
// Sort by average price (desc)
|
||||
service.sortTags(tags, "averagePrice", "desc");
|
||||
expect(tags[0].tag).toBe("c"); // highest price
|
||||
});
|
||||
});
|
||||
|
||||
describe("memory management", () => {
|
||||
it("should provide memory usage statistics", () => {
|
||||
const stats = service.getMemoryStats();
|
||||
|
||||
expect(stats).toHaveProperty("cacheEntries");
|
||||
expect(stats).toHaveProperty("estimatedSizeBytes");
|
||||
expect(stats).toHaveProperty("estimatedSizeMB");
|
||||
expect(stats).toHaveProperty("maxEntries");
|
||||
expect(stats).toHaveProperty("cacheHitRatio");
|
||||
});
|
||||
|
||||
it("should limit cache size to prevent memory issues", async () => {
|
||||
// Fill cache beyond limit
|
||||
for (let i = 0; i < 60; i++) {
|
||||
service.cache.set(`entry_${i}`, {
|
||||
data: { large: "data".repeat(1000) },
|
||||
timestamp: Date.now() - i * 1000, // Different timestamps
|
||||
});
|
||||
}
|
||||
|
||||
expect(service.cache.size).toBeGreaterThan(50);
|
||||
|
||||
service.cleanup();
|
||||
|
||||
expect(service.cache.size).toBeLessThanOrEqual(50);
|
||||
});
|
||||
|
||||
it("should clean up resources on destroy", () => {
|
||||
const initialCacheSize = service.cache.size;
|
||||
|
||||
service.destroy();
|
||||
|
||||
expect(service.cache.size).toBe(0);
|
||||
expect(service.cleanupInterval).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("preloading", () => {
|
||||
it("should preload next page without blocking", async () => {
|
||||
const mockTags = Array.from({ length: 50 }, (_, i) => ({
|
||||
tag: `tag_${i}`,
|
||||
productCount: i + 1,
|
||||
percentage: ((i + 1) / 50) * 100,
|
||||
variantCount: (i + 1) * 2,
|
||||
totalValue: (i + 1) * 100,
|
||||
averagePrice: 50 + i,
|
||||
priceRange: { min: 10, max: 100 },
|
||||
}));
|
||||
|
||||
// Mock the full dataset in cache
|
||||
service.cache.set("all_tags_full_dataset", {
|
||||
data: mockTags,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
const options = {
|
||||
page: 0,
|
||||
pageSize: 10,
|
||||
sortBy: "productCount",
|
||||
sortOrder: "desc",
|
||||
};
|
||||
|
||||
// Preload should not throw errors
|
||||
await expect(service.preloadNextPage(options)).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,239 +0,0 @@
|
||||
const PerformanceOptimizer = require("../../../src/tui/utils/PerformanceOptimizer.js");
|
||||
|
||||
describe("PerformanceOptimizer", () => {
|
||||
let optimizer;
|
||||
|
||||
beforeEach(() => {
|
||||
optimizer = new PerformanceOptimizer();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
optimizer.destroy();
|
||||
});
|
||||
|
||||
describe("debounce", () => {
|
||||
it("should debounce function calls", (done) => {
|
||||
let callCount = 0;
|
||||
const testFunction = () => {
|
||||
callCount++;
|
||||
};
|
||||
|
||||
const debouncedFunction = optimizer.debounce(testFunction, 100, "test");
|
||||
|
||||
// Call multiple times rapidly
|
||||
debouncedFunction();
|
||||
debouncedFunction();
|
||||
debouncedFunction();
|
||||
|
||||
// Should not have been called yet
|
||||
expect(callCount).toBe(0);
|
||||
|
||||
// Wait for debounce delay
|
||||
setTimeout(() => {
|
||||
expect(callCount).toBe(1);
|
||||
done();
|
||||
}, 150);
|
||||
});
|
||||
});
|
||||
|
||||
describe("throttle", () => {
|
||||
it("should throttle function calls", (done) => {
|
||||
let callCount = 0;
|
||||
const testFunction = () => {
|
||||
callCount++;
|
||||
};
|
||||
|
||||
const throttledFunction = optimizer.throttle(testFunction, 100, "test");
|
||||
|
||||
// Call multiple times rapidly
|
||||
throttledFunction();
|
||||
throttledFunction();
|
||||
throttledFunction();
|
||||
|
||||
// Should have been called once immediately
|
||||
expect(callCount).toBe(1);
|
||||
|
||||
// Wait and call again
|
||||
setTimeout(() => {
|
||||
throttledFunction();
|
||||
expect(callCount).toBe(2);
|
||||
done();
|
||||
}, 150);
|
||||
});
|
||||
});
|
||||
|
||||
describe("memoize", () => {
|
||||
it("should memoize function results", () => {
|
||||
let callCount = 0;
|
||||
const expensiveFunction = (x, y) => {
|
||||
callCount++;
|
||||
return x + y;
|
||||
};
|
||||
|
||||
const memoizedFunction = optimizer.memoize(expensiveFunction);
|
||||
|
||||
// First call
|
||||
const result1 = memoizedFunction(1, 2);
|
||||
expect(result1).toBe(3);
|
||||
expect(callCount).toBe(1);
|
||||
|
||||
// Second call with same arguments
|
||||
const result2 = memoizedFunction(1, 2);
|
||||
expect(result2).toBe(3);
|
||||
expect(callCount).toBe(1); // Should not have called function again
|
||||
|
||||
// Third call with different arguments
|
||||
const result3 = memoizedFunction(2, 3);
|
||||
expect(result3).toBe(5);
|
||||
expect(callCount).toBe(2);
|
||||
});
|
||||
|
||||
it("should limit cache size", () => {
|
||||
const testFunction = (x) => x * 2;
|
||||
const memoizedFunction = optimizer.memoize(testFunction, undefined, 2);
|
||||
|
||||
// Fill cache beyond limit
|
||||
memoizedFunction(1);
|
||||
memoizedFunction(2);
|
||||
memoizedFunction(3); // Should evict first entry
|
||||
|
||||
// Verify first entry was evicted
|
||||
let callCount = 0;
|
||||
const countingFunction = (x) => {
|
||||
callCount++;
|
||||
return x * 2;
|
||||
};
|
||||
const countingMemoized = optimizer.memoize(
|
||||
countingFunction,
|
||||
undefined,
|
||||
2
|
||||
);
|
||||
|
||||
countingMemoized(1);
|
||||
countingMemoized(2);
|
||||
expect(callCount).toBe(2);
|
||||
|
||||
countingMemoized(3); // Should evict entry for 1
|
||||
expect(callCount).toBe(3);
|
||||
|
||||
countingMemoized(1); // Should call function again since it was evicted
|
||||
expect(callCount).toBe(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createVirtualScrolling", () => {
|
||||
it("should calculate virtual scrolling data correctly", () => {
|
||||
const items = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `Item ${i}`,
|
||||
}));
|
||||
const result = optimizer.createVirtualScrolling(items, 300, 30, 150);
|
||||
|
||||
expect(result.totalHeight).toBe(3000); // 100 items * 30px each
|
||||
expect(result.startIndex).toBe(5); // 150px / 30px per item
|
||||
expect(result.visibleItems.length).toBeGreaterThan(0);
|
||||
expect(result.visibleItems.length).toBeLessThanOrEqual(12); // Visible count + buffer
|
||||
});
|
||||
});
|
||||
|
||||
describe("createLazyLoading", () => {
|
||||
it("should create lazy loading data correctly", () => {
|
||||
const items = Array.from({ length: 100 }, (_, i) => ({
|
||||
id: i,
|
||||
name: `Item ${i}`,
|
||||
}));
|
||||
const result = optimizer.createLazyLoading(items, 10, 5);
|
||||
|
||||
expect(result.startIndex).toBe(5); // 10 - 5
|
||||
expect(result.endIndex).toBe(20); // 10 + 5*2
|
||||
expect(result.loadedItems.length).toBe(15); // 20 - 5
|
||||
expect(result.hasMore).toBe(true);
|
||||
expect(result.hasPrevious).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("memory management", () => {
|
||||
it("should track memory usage", () => {
|
||||
const stats = optimizer.getMemoryUsage();
|
||||
expect(stats).toHaveProperty("estimatedSizeBytes");
|
||||
expect(stats).toHaveProperty("estimatedSizeMB");
|
||||
expect(stats).toHaveProperty("cacheEntries");
|
||||
expect(stats).toHaveProperty("eventListeners");
|
||||
expect(stats).toHaveProperty("activeTimers");
|
||||
expect(stats).toHaveProperty("memoryPressure");
|
||||
});
|
||||
|
||||
it("should clean up expired cache entries", () => {
|
||||
// Add some cache entries
|
||||
optimizer.componentCache.set("test1", {
|
||||
data: "test",
|
||||
timestamp: Date.now() - 10000,
|
||||
});
|
||||
optimizer.componentCache.set("test2", {
|
||||
data: "test",
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
optimizer.cleanupExpiredCache(5000); // 5 second max age
|
||||
|
||||
expect(optimizer.componentCache.has("test1")).toBe(false);
|
||||
expect(optimizer.componentCache.has("test2")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("event listener management", () => {
|
||||
it("should register and cleanup event listeners", () => {
|
||||
const mockTarget = {
|
||||
addEventListener: jest.fn(),
|
||||
removeEventListener: jest.fn(),
|
||||
};
|
||||
|
||||
const handler = () => {};
|
||||
|
||||
optimizer.registerEventListener(
|
||||
"component1",
|
||||
"click",
|
||||
handler,
|
||||
mockTarget
|
||||
);
|
||||
expect(mockTarget.addEventListener).toHaveBeenCalledWith(
|
||||
"click",
|
||||
handler
|
||||
);
|
||||
|
||||
optimizer.cleanupEventListeners("component1");
|
||||
expect(mockTarget.removeEventListener).toHaveBeenCalledWith(
|
||||
"click",
|
||||
handler
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("batched updates", () => {
|
||||
it("should batch updates correctly", (done) => {
|
||||
let batchedUpdates = [];
|
||||
const updateFunction = async (batch) => {
|
||||
batchedUpdates.push(batch);
|
||||
};
|
||||
|
||||
const batchedUpdate = optimizer.createBatchedUpdate(
|
||||
updateFunction,
|
||||
3,
|
||||
50
|
||||
);
|
||||
|
||||
// Add updates
|
||||
batchedUpdate("update1");
|
||||
batchedUpdate("update2");
|
||||
batchedUpdate("update3");
|
||||
batchedUpdate("update4");
|
||||
|
||||
// Wait for batching
|
||||
setTimeout(() => {
|
||||
expect(batchedUpdates.length).toBeGreaterThan(0);
|
||||
expect(batchedUpdates[0]).toEqual(["update1", "update2", "update3"]);
|
||||
done();
|
||||
}, 100);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,130 +0,0 @@
|
||||
/**
|
||||
* Input Validator Tests
|
||||
* Tests for comprehensive input validation
|
||||
* Requirements: 5.4, 5.6
|
||||
*/
|
||||
|
||||
const inputValidator = require("../../../src/tui/utils/inputValidator");
|
||||
|
||||
describe("InputValidator Tests", () => {
|
||||
test("should validate operation type correctly", () => {
|
||||
const validResult = inputValidator.validateField("operationType", "update");
|
||||
expect(validResult.isValid).toBe(true);
|
||||
expect(validResult.value).toBe("update");
|
||||
|
||||
const invalidResult = inputValidator.validateField(
|
||||
"operationType",
|
||||
"invalid"
|
||||
);
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
expect(invalidResult.errors).toContain(
|
||||
"operationType must be one of: update, rollback"
|
||||
);
|
||||
});
|
||||
|
||||
test("should validate scheduled time correctly", () => {
|
||||
const futureDate = new Date(Date.now() + 86400000).toISOString();
|
||||
const validResult = inputValidator.validateField(
|
||||
"scheduledTime",
|
||||
futureDate
|
||||
);
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const pastDate = new Date(Date.now() - 86400000).toISOString();
|
||||
const invalidResult = inputValidator.validateField(
|
||||
"scheduledTime",
|
||||
pastDate
|
||||
);
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
});
|
||||
|
||||
test("should validate shop domain correctly", () => {
|
||||
const validDomain = "test-store.myshopify.com";
|
||||
const validResult = inputValidator.validateField("shopDomain", validDomain);
|
||||
expect(validResult.isValid).toBe(true);
|
||||
|
||||
const invalidDomain = "invalid domain";
|
||||
const invalidResult = inputValidator.validateField(
|
||||
"shopDomain",
|
||||
invalidDomain
|
||||
);
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
});
|
||||
|
||||
test("should validate price adjustment correctly", () => {
|
||||
const validPercentage = 25.5;
|
||||
const validResult = inputValidator.validateField(
|
||||
"priceAdjustment",
|
||||
validPercentage
|
||||
);
|
||||
expect(validResult.isValid).toBe(true);
|
||||
expect(validResult.value).toBe(25.5);
|
||||
|
||||
const invalidPercentage = 1500; // Too high
|
||||
const invalidResult = inputValidator.validateField(
|
||||
"priceAdjustment",
|
||||
invalidPercentage
|
||||
);
|
||||
expect(invalidResult.isValid).toBe(false);
|
||||
});
|
||||
|
||||
test("should validate multiple fields", () => {
|
||||
const data = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "weekly",
|
||||
description: "Test schedule",
|
||||
};
|
||||
|
||||
const result = inputValidator.validateFields(data);
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.data.operationType).toBe("update");
|
||||
expect(result.data.recurrence).toBe("weekly");
|
||||
});
|
||||
|
||||
test("should handle optional fields correctly", () => {
|
||||
const data = {
|
||||
operationType: "update",
|
||||
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
||||
recurrence: "once",
|
||||
// description is optional and missing
|
||||
};
|
||||
|
||||
const result = inputValidator.validateFields(data);
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.data.description).toBeUndefined();
|
||||
});
|
||||
|
||||
test("should convert string numbers to numbers", () => {
|
||||
const result = inputValidator.validateField("priceAdjustment", "25.5");
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.value).toBe(25.5);
|
||||
expect(typeof result.value).toBe("number");
|
||||
});
|
||||
|
||||
test("should sanitize input strings", () => {
|
||||
const dirtyInput = " test string with \x00 control chars ";
|
||||
const sanitized = inputValidator.sanitizeInput(dirtyInput, {
|
||||
trim: true,
|
||||
removeControlChars: true,
|
||||
});
|
||||
|
||||
expect(sanitized).toBe("test string with control chars");
|
||||
});
|
||||
|
||||
test("should validate string length limits", () => {
|
||||
const longDescription = "x".repeat(501);
|
||||
const result = inputValidator.validateField("description", longDescription);
|
||||
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(
|
||||
result.errors.some((error) => error.includes("500 characters"))
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test("should validate required fields", () => {
|
||||
const result = inputValidator.validateField("operationType", "");
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.errors).toContain("operationType is required");
|
||||
});
|
||||
});
|
||||
@@ -1,122 +0,0 @@
|
||||
/**
|
||||
* State Manager Tests
|
||||
* Tests for state management and cleanup functionality
|
||||
* Requirements: 5.4, 5.6
|
||||
*/
|
||||
|
||||
const stateManager = require("../../../src/tui/utils/stateManager");
|
||||
|
||||
describe("StateManager Tests", () => {
|
||||
beforeEach(() => {
|
||||
// Clear all states before each test
|
||||
stateManager.clearAllStates();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Cleanup after each test
|
||||
stateManager.clearAllStates();
|
||||
});
|
||||
|
||||
test("should register screen handlers", () => {
|
||||
const mockCleanup = jest.fn();
|
||||
const mockValidate = jest
|
||||
.fn()
|
||||
.mockResolvedValue({ isValid: true, errors: [] });
|
||||
|
||||
stateManager.registerScreen("test-screen", {
|
||||
cleanup: mockCleanup,
|
||||
validate: mockValidate,
|
||||
});
|
||||
|
||||
expect(stateManager.cleanupHandlers.has("test-screen")).toBe(true);
|
||||
expect(stateManager.stateValidators.has("test-screen")).toBe(true);
|
||||
});
|
||||
|
||||
test("should save and restore screen state", async () => {
|
||||
const testState = {
|
||||
selectedIndex: 5,
|
||||
formData: { name: "test" },
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
|
||||
await stateManager.saveScreenState("test-screen", testState);
|
||||
const restoredState = await stateManager.restoreScreenState("test-screen");
|
||||
|
||||
expect(restoredState.selectedIndex).toBe(5);
|
||||
expect(restoredState.formData.name).toBe("test");
|
||||
expect(restoredState._metadata).toBeUndefined(); // Metadata should be stripped
|
||||
});
|
||||
|
||||
test("should perform screen transitions with cleanup", async () => {
|
||||
const mockCleanup = jest.fn().mockResolvedValue();
|
||||
|
||||
stateManager.registerScreen("from-screen", {
|
||||
cleanup: mockCleanup,
|
||||
});
|
||||
|
||||
const currentState = { data: "test" };
|
||||
|
||||
await stateManager.switchScreen("from-screen", "to-screen", currentState);
|
||||
|
||||
expect(mockCleanup).toHaveBeenCalled();
|
||||
expect(stateManager.activeScreen).toBe("to-screen");
|
||||
});
|
||||
|
||||
test("should validate states", async () => {
|
||||
const mockValidator = jest.fn().mockResolvedValue({
|
||||
isValid: false,
|
||||
errors: ["Test error"],
|
||||
});
|
||||
|
||||
stateManager.registerScreen("test-screen", {
|
||||
validate: mockValidator,
|
||||
});
|
||||
|
||||
await stateManager.saveScreenState("test-screen", { data: "test" });
|
||||
|
||||
const report = await stateManager.validateAllStates();
|
||||
|
||||
expect(report.invalidStates).toBe(1);
|
||||
expect(report.errors).toHaveLength(1);
|
||||
expect(mockValidator).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("should provide memory statistics", () => {
|
||||
stateManager.saveScreenState("screen1", { data: "test1" });
|
||||
stateManager.saveScreenState("screen2", { data: "test2" });
|
||||
|
||||
const stats = stateManager.getMemoryStats();
|
||||
|
||||
expect(stats.screenCount).toBe(2);
|
||||
expect(stats.totalSize).toBeGreaterThan(0);
|
||||
expect(stats.screenSizes).toHaveProperty("screen1");
|
||||
expect(stats.screenSizes).toHaveProperty("screen2");
|
||||
});
|
||||
|
||||
test("should track navigation history", async () => {
|
||||
await stateManager.switchScreen("screen1", "screen2", {});
|
||||
await stateManager.switchScreen("screen2", "screen3", {});
|
||||
|
||||
const history = stateManager.getHistory(5);
|
||||
|
||||
expect(history).toHaveLength(2);
|
||||
expect(history[0].from).toBe("screen2");
|
||||
expect(history[0].to).toBe("screen3");
|
||||
expect(history[1].from).toBe("screen1");
|
||||
expect(history[1].to).toBe("screen2");
|
||||
});
|
||||
|
||||
test("should clear screen states", () => {
|
||||
stateManager.saveScreenState("screen1", { data: "test1" });
|
||||
stateManager.saveScreenState("screen2", { data: "test2" });
|
||||
|
||||
expect(stateManager.screenStates.size).toBe(2);
|
||||
|
||||
stateManager.clearScreenState("screen1");
|
||||
expect(stateManager.screenStates.size).toBe(1);
|
||||
expect(stateManager.screenStates.has("screen1")).toBe(false);
|
||||
|
||||
stateManager.clearAllStates();
|
||||
expect(stateManager.screenStates.size).toBe(0);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user