TUI is a doomed path. Stick with CLI
This commit is contained in:
259
tests/tui/services/LogService.performance.test.js
Normal file
259
tests/tui/services/LogService.performance.test.js
Normal file
@@ -0,0 +1,259 @@
|
||||
const LogService = require("../../../src/tui/services/LogService.js");
|
||||
|
||||
// Exercises the performance-oriented surface of LogService: pagination,
// streaming parsing, cache statistics, memory management, filtering,
// and background preloading.

// Builds `count` uniform INFO-level log fixtures for the pagination tests.
const makeLogEntries = (count) =>
  Array.from({ length: count }, (_, index) => ({
    id: `log_${index}`,
    timestamp: new Date(),
    title: `Log Entry ${index}`,
    message: `Message ${index}`,
    level: "INFO",
  }));

describe("LogService Performance Optimizations", () => {
  let service;

  beforeEach(() => {
    service = new LogService("test-progress.md");
    jest.clearAllMocks();
  });

  afterEach(() => {
    if (service) {
      service.destroy();
    }
  });

  describe("efficient pagination", () => {
    it("should paginate logs efficiently", () => {
      const entries = makeLogEntries(100);

      // Request the third page (zero-based page index 2) of 10 items each.
      const page = service.paginateLogs(entries, 2, 10);

      expect(page.entries).toHaveLength(10);
      expect(page.pagination.currentPage).toBe(2);
      expect(page.pagination.totalPages).toBe(10);
      expect(page.pagination.hasNextPage).toBe(true);
      expect(page.pagination.hasPreviousPage).toBe(true);
      expect(page.pagination.startIndex).toBe(21); // indices are 1-based
      expect(page.pagination.endIndex).toBe(30);
    });

    it("should handle edge cases in pagination", () => {
      const entries = makeLogEntries(5);

      // Fewer entries than the page size: a single page with no neighbours.
      const page = service.paginateLogs(entries, 0, 10);

      expect(page.entries).toHaveLength(5);
      expect(page.pagination.totalPages).toBe(1);
      expect(page.pagination.hasNextPage).toBe(false);
      expect(page.pagination.hasPreviousPage).toBe(false);
    });
  });

  describe("streaming for large files", () => {
    it("should parse log content in streaming mode", async () => {
      // Wide-open filters so the streaming path itself is what is exercised.
      const filters = {
        dateRange: "all",
        operationType: "all",
        status: "all",
        searchTerm: "",
      };

      const result = await service.parseLogContentStreaming(
        "Test log content",
        filters,
        0,
        10
      );

      expect(result).toHaveProperty("entries");
      expect(result).toHaveProperty("totalCount");
      expect(Array.isArray(result.entries)).toBe(true);
    });
  });

  describe("caching optimizations", () => {
    it("should track cache statistics", () => {
      const cacheStats = service.getCacheStats();

      expect(cacheStats).toHaveProperty("size");
      expect(cacheStats).toHaveProperty("keys");
      expect(typeof cacheStats.size).toBe("number");
      expect(Array.isArray(cacheStats.keys)).toBe(true);
    });

    it("should provide memory usage statistics", () => {
      const memoryStats = service.getMemoryStats();

      // The report must expose every expected measurement field.
      for (const field of [
        "cacheEntries",
        "estimatedSizeBytes",
        "estimatedSizeMB",
        "maxEntries",
        "cacheHitRatio",
      ]) {
        expect(memoryStats).toHaveProperty(field);
      }
    });
  });

  describe("memory management", () => {
    it("should clean up expired cache entries", () => {
      const TEN_MINUTES_MS = 10 * 60 * 1000;

      // Seed one stale entry (10 minutes old) and one fresh entry.
      service.cache.set("old_entry", {
        data: { test: "data" },
        timestamp: Date.now() - TEN_MINUTES_MS,
      });
      service.cache.set("new_entry", {
        data: { test: "data" },
        timestamp: Date.now(),
      });
      expect(service.cache.size).toBe(2);

      service.cleanup();

      // Only the fresh entry survives the sweep.
      expect(service.cache.size).toBe(1);
      expect(service.cache.has("new_entry")).toBe(true);
      expect(service.cache.has("old_entry")).toBe(false);
    });

    it("should limit cache size to prevent memory issues", () => {
      // Overfill the cache with 40 bulky entries of strictly descending age.
      for (let i = 0; i < 40; i++) {
        service.cache.set(`entry_${i}`, {
          data: { large: "data".repeat(1000) },
          timestamp: Date.now() - i * 1000,
        });
      }
      expect(service.cache.size).toBeGreaterThan(30);

      service.cleanup();

      // Cleanup enforces the size cap.
      expect(service.cache.size).toBeLessThanOrEqual(30);
    });

    it("should clean up resources on destroy", () => {
      service.destroy();

      expect(service.cache.size).toBe(0);
      expect(service.cleanupInterval).toBeNull();
    });
  });

  describe("filtering optimizations", () => {
    it("should filter logs efficiently", () => {
      // One fixture per outcome: success, error, and rollback.
      const sampleLogs = [
        {
          id: "log_1",
          timestamp: new Date("2024-01-01"),
          title: "Update Product A",
          message: "Product updated successfully",
          level: "SUCCESS",
          type: "update",
          details: "Product A details",
          productTitle: "Product A",
        },
        {
          id: "log_2",
          timestamp: new Date("2024-01-02"),
          title: "Error Product B",
          message: "Product update failed",
          level: "ERROR",
          type: "update",
          details: "Product B error details",
          productTitle: "Product B",
        },
        {
          id: "log_3",
          timestamp: new Date("2024-01-03"),
          title: "Rollback Product C",
          message: "Product rollback completed",
          level: "INFO",
          type: "rollback",
          details: "Product C rollback details",
          productTitle: "Product C",
        },
      ];

      // Operation-type filter keeps both "update" entries.
      expect(
        service.filterLogs(sampleLogs, { operationType: "update" })
      ).toHaveLength(2);

      // Status filter narrows down to the single ERROR entry.
      const errored = service.filterLogs(sampleLogs, { status: "error" });
      expect(errored).toHaveLength(1);
      expect(errored[0].level).toBe("ERROR");

      // Free-text search matches via the product title.
      const matched = service.filterLogs(sampleLogs, {
        searchTerm: "Product A",
      });
      expect(matched).toHaveLength(1);
      expect(matched[0].productTitle).toBe("Product A");
    });

    it("should handle date range filtering", () => {
      const DAY_MS = 24 * 60 * 60 * 1000;
      const current = new Date();
      const oneDayAgo = new Date(current.getTime() - DAY_MS);
      const sevenDaysAgo = new Date(current.getTime() - 7 * DAY_MS);

      const sampleLogs = [
        {
          id: "log_1",
          timestamp: current,
          title: "Recent Log",
          message: "Recent message",
          level: "INFO",
        },
        {
          id: "log_2",
          timestamp: oneDayAgo,
          title: "Yesterday Log",
          message: "Yesterday message",
          level: "INFO",
        },
        {
          id: "log_3",
          timestamp: sevenDaysAgo,
          title: "Old Log",
          message: "Old message",
          level: "INFO",
        },
      ];

      // "today" keeps only the entry stamped with the current time.
      const todays = service.filterLogs(sampleLogs, { dateRange: "today" });
      expect(todays).toHaveLength(1);
      expect(todays[0].title).toBe("Recent Log");

      // "week" keeps at least the recent and yesterday entries.
      const thisWeek = service.filterLogs(sampleLogs, { dateRange: "week" });
      expect(thisWeek.length).toBeGreaterThanOrEqual(2);
    });
  });

  describe("preloading", () => {
    it("should preload next page without blocking", async () => {
      // Stub out file access so preloading is exercised in isolation.
      service.getFilteredLogs = jest.fn().mockResolvedValue({
        entries: [],
        pagination: { hasNextPage: true },
      });

      const query = {
        page: 0,
        pageSize: 10,
        dateRange: "all",
        operationType: "all",
        status: "all",
        searchTerm: "",
      };

      // Preloading resolves quietly with no return value.
      await expect(service.preloadNextPage(query)).resolves.toBeUndefined();
    });
  });
});
|
||||
Reference in New Issue
Block a user