Add LogViewerScreen search/filtering test suite and related log-viewer changes

This commit is contained in:
2025-08-14 16:36:12 -05:00
parent 66b7e42275
commit 62f6d6f279
144 changed files with 41421 additions and 2458 deletions

View File

@@ -0,0 +1,455 @@
const LogReaderService = require("../../../../src/services/logReader");
// Mock the LogReaderService
jest.mock("../../../../src/services/logReader");
describe("LogViewerScreen - Search and Filtering", () => {
let mockLogReader;
let mockPaginatedData;
beforeEach(() => {
  jest.clearAllMocks();
  // Setup mock data with various entry types for testing.
  // NOTE(review): the exact ids, levels, messages, titles, details, and
  // configuration values below are load-bearing — the filter/search tests
  // in this file derive their expected subsets from these strings.
  mockPaginatedData = {
    entries: [
      {
        // Operation header entry (INFO) — carries the "summer-sale"
        // configuration value used by the configuration-search test.
        id: "entry_1",
        type: "operation_start",
        timestamp: new Date("2025-08-06T20:30:00Z"),
        level: "INFO",
        message: "Price Update Operation Started",
        title: "Price Update Operation",
        details: "Target Tag: summer-sale\nPrice Adjustment: -10%",
        configuration: {
          "Target Tag": "summer-sale",
          "Price Adjustment": "-10%",
        },
      },
      {
        // Successful per-product update (SUCCESS) — matched by the
        // "snowboard" and "hidden" search tests.
        id: "entry_2",
        type: "product_update",
        timestamp: new Date("2025-08-06T20:30:30Z"),
        level: "SUCCESS",
        message: "Updated The Hidden Snowboard",
        title: "Product Update: The Hidden Snowboard",
        details:
          "Product ID: gid://shopify/Product/8116504920355\nPrice: $749.99 → $674.99",
        productTitle: "The Hidden Snowboard",
        productId: "gid://shopify/Product/8116504920355",
      },
      {
        // Failure entry (ERROR) — the only ERROR-level entry; matched by the
        // "error", "rate limit", and "failed" search/filter tests.
        id: "entry_3",
        type: "error",
        timestamp: new Date("2025-08-06T20:31:00Z"),
        level: "ERROR",
        message: "Failed to update Product XYZ",
        title: "Error: Product XYZ",
        details: "Product ID: xyz123\nError: Rate limit exceeded",
        productTitle: "Product XYZ",
        productId: "xyz123",
      },
      {
        // Rollback entry (INFO) — deliberately timestamped "now" so the
        // date-based ("today") search test always has a match.
        id: "entry_4",
        type: "rollback",
        timestamp: new Date(),
        level: "INFO",
        message: "Rollback Operation Started",
        title: "Rollback Operation",
        details: "Rolling back previous changes",
        configuration: { "Operation Mode": "rollback" },
      },
    ],
    // Single-page pagination over the 4 fixture entries (1-based start/end).
    pagination: {
      currentPage: 0,
      pageSize: 10,
      totalEntries: 4,
      totalPages: 1,
      hasNextPage: false,
      hasPreviousPage: false,
      startIndex: 1,
      endIndex: 4,
    },
    // Default "no filtering" state.
    filters: {
      levelFilter: "ALL",
      searchTerm: "",
    },
  };
  // Setup LogReaderService mock: every `new LogReaderService()` in the tests
  // returns this shared stub, so assertions on mockLogReader observe all calls.
  mockLogReader = {
    getPaginatedEntries: jest.fn().mockResolvedValue(mockPaginatedData),
    getLogStatistics: jest.fn().mockResolvedValue({}),
    clearCache: jest.fn(),
    watchFile: jest.fn().mockReturnValue(() => {}),
  };
  LogReaderService.mockImplementation(() => mockLogReader);
});
describe("Level Filtering", () => {
  // Builds the paginated payload the mocked reader should return when a
  // level filter is applied: fixture entries restricted to `level`, with the
  // filter state echoed back the way the real service does.
  const resultsForLevel = (level) => ({
    ...mockPaginatedData,
    entries: mockPaginatedData.entries.filter((e) => e.level === level),
    filters: { ...mockPaginatedData.filters, levelFilter: level },
  });

  test("supports filtering by ERROR level", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(resultsForLevel("ERROR"));
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "ERROR",
    });
    expect(mockLogReader.getPaginatedEntries).toHaveBeenCalledWith(
      expect.objectContaining({ levelFilter: "ERROR" })
    );
    expect(result.filters.levelFilter).toBe("ERROR");
    // Strengthened: verify the returned entries honor the filter, not just
    // that the filter value is echoed back. The fixture has one ERROR entry.
    expect(result.entries).toHaveLength(1);
    expect(result.entries.every((e) => e.level === "ERROR")).toBe(true);
  });

  test("supports filtering by SUCCESS level", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      resultsForLevel("SUCCESS")
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "SUCCESS",
    });
    expect(result.filters.levelFilter).toBe("SUCCESS");
    // One SUCCESS entry in the fixture (entry_2).
    expect(result.entries).toHaveLength(1);
    expect(result.entries.every((e) => e.level === "SUCCESS")).toBe(true);
  });

  test("supports filtering by INFO level", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(resultsForLevel("INFO"));
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "INFO",
    });
    expect(result.filters.levelFilter).toBe("INFO");
    // Two INFO entries in the fixture (entry_1 and entry_4).
    expect(result.entries).toHaveLength(2);
    expect(result.entries.every((e) => e.level === "INFO")).toBe(true);
  });

  test("supports showing all levels", async () => {
    // Default mock resolves with the unfiltered fixture.
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "ALL",
    });
    expect(result.filters.levelFilter).toBe("ALL");
    expect(result.entries.length).toBe(4); // All entries
  });
});
describe("Text Search", () => {
  // Builds the paginated payload the mocked reader should return for a given
  // search term: the shared fixture filtered by `predicate`, with the term
  // echoed back in the filter state the way the real service does.
  const searchResultsFor = (searchTerm, predicate) => ({
    ...mockPaginatedData,
    entries: mockPaginatedData.entries.filter(predicate),
    filters: { ...mockPaginatedData.filters, searchTerm },
  });

  test("searches in message content", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      searchResultsFor("snowboard", (e) =>
        e.message.toLowerCase().includes("snowboard")
      )
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "snowboard",
    });
    expect(mockLogReader.getPaginatedEntries).toHaveBeenCalledWith(
      expect.objectContaining({ searchTerm: "snowboard" })
    );
    expect(result.filters.searchTerm).toBe("snowboard");
    // Strengthened: only entry_2's message mentions "snowboard".
    expect(result.entries.map((e) => e.id)).toEqual(["entry_2"]);
  });

  test("searches in title content", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      searchResultsFor("error", (e) => e.title.toLowerCase().includes("error"))
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "error",
    });
    expect(result.filters.searchTerm).toBe("error");
    // Only entry_3's title ("Error: Product XYZ") matches.
    expect(result.entries.map((e) => e.id)).toEqual(["entry_3"]);
  });

  test("searches in details content", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      searchResultsFor("rate limit", (e) =>
        e.details.toLowerCase().includes("rate limit")
      )
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "rate limit",
    });
    expect(result.filters.searchTerm).toBe("rate limit");
    // Only entry_3's details mention the rate limit.
    expect(result.entries.map((e) => e.id)).toEqual(["entry_3"]);
  });

  test("searches in product titles", async () => {
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      searchResultsFor(
        "hidden",
        (e) => e.productTitle && e.productTitle.toLowerCase().includes("hidden")
      )
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "hidden",
    });
    expect(result.filters.searchTerm).toBe("hidden");
    // Only entry_2 carries a productTitle containing "hidden".
    expect(result.entries.map((e) => e.id)).toEqual(["entry_2"]);
  });

  test("handles case-insensitive search", async () => {
    // Uppercase term, lowercase match — mirrors the service's
    // case-insensitive matching contract.
    mockLogReader.getPaginatedEntries.mockResolvedValue(
      searchResultsFor("UPDATE", (e) =>
        e.message.toLowerCase().includes("update")
      )
    );
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "UPDATE",
    });
    expect(result.filters.searchTerm).toBe("UPDATE");
    // "update" appears in the messages of entries 1, 2, and 3.
    expect(result.entries.map((e) => e.id)).toEqual([
      "entry_1",
      "entry_2",
      "entry_3",
    ]);
  });
});
describe("Advanced Search Features", () => {
  test("searches by operation type", async () => {
    const searchResults = {
      ...mockPaginatedData,
      entries: mockPaginatedData.entries.filter((e) => e.type === "rollback"),
      filters: { ...mockPaginatedData.filters, searchTerm: "rollback" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(searchResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "rollback",
    });
    expect(result.filters.searchTerm).toBe("rollback");
    // Strengthened: only entry_4 has type "rollback".
    expect(result.entries.map((e) => e.id)).toEqual(["entry_4"]);
  });

  test("searches in configuration values", async () => {
    const searchResults = {
      ...mockPaginatedData,
      entries: mockPaginatedData.entries.filter(
        (e) =>
          e.configuration &&
          Object.values(e.configuration).some((v) => v.includes("summer-sale"))
      ),
      filters: { ...mockPaginatedData.filters, searchTerm: "summer-sale" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(searchResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "summer-sale",
    });
    expect(result.filters.searchTerm).toBe("summer-sale");
    // Only entry_1's configuration contains "summer-sale".
    expect(result.entries.map((e) => e.id)).toEqual(["entry_1"]);
  });

  test("supports date-based search for today", async () => {
    const todayResults = {
      ...mockPaginatedData,
      entries: mockPaginatedData.entries.filter((e) => {
        const today = new Date();
        return e.timestamp.toDateString() === today.toDateString();
      }),
      filters: { ...mockPaginatedData.filters, searchTerm: "today" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(todayResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "today",
    });
    expect(result.filters.searchTerm).toBe("today");
    // entry_4 is stamped with `new Date()` in beforeEach, so it always
    // matches "today". Use containment (not an exact set) because the
    // fixed 2025-08-06 entries would also match if run on that date.
    expect(result.entries.map((e) => e.id)).toContain("entry_4");
  });

  test("returns empty results for non-matching search", async () => {
    // Zeroed pagination mirrors what the real service reports for no hits.
    const emptyResults = {
      ...mockPaginatedData,
      entries: [],
      pagination: {
        ...mockPaginatedData.pagination,
        totalEntries: 0,
        totalPages: 0,
        endIndex: 0,
      },
      filters: { ...mockPaginatedData.filters, searchTerm: "nonexistent" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(emptyResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      searchTerm: "nonexistent",
    });
    expect(result.entries).toHaveLength(0);
    expect(result.pagination.totalEntries).toBe(0);
  });
});
describe("Combined Filtering", () => {
  test("supports combining level filter and search", async () => {
    const combinedResults = {
      ...mockPaginatedData,
      entries: mockPaginatedData.entries.filter(
        (e) =>
          e.level === "ERROR" && e.message.toLowerCase().includes("failed")
      ),
      filters: { levelFilter: "ERROR", searchTerm: "failed" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(combinedResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "ERROR",
      searchTerm: "failed",
    });
    expect(result.filters.levelFilter).toBe("ERROR");
    expect(result.filters.searchTerm).toBe("failed");
    // Strengthened: only entry_3 satisfies both conditions in the fixture.
    expect(result.entries.map((e) => e.id)).toEqual(["entry_3"]);
  });

  test("resets pagination when applying filters", async () => {
    // NOTE(review): this only verifies the call contract (page: 0 is passed
    // through to the service); the actual "reset page on filter change"
    // behavior lives in the screen component and is not exercised here.
    const logReader = new LogReaderService();
    // Apply filter and verify page resets to 0
    await logReader.getPaginatedEntries({
      levelFilter: "ERROR",
      page: 0, // Should reset to 0 when filtering
    });
    expect(mockLogReader.getPaginatedEntries).toHaveBeenCalledWith(
      expect.objectContaining({ page: 0 })
    );
  });
});
describe("Filter Persistence", () => {
  test("maintains filters across pagination", async () => {
    const logReader = new LogReaderService();
    // Apply filters and navigate to page 2 (0-indexed page 1).
    await logReader.getPaginatedEntries({
      levelFilter: "INFO",
      searchTerm: "update",
      page: 1,
    });
    // Both filters must travel together with the page request.
    expect(mockLogReader.getPaginatedEntries).toHaveBeenCalledWith(
      expect.objectContaining({
        levelFilter: "INFO",
        searchTerm: "update",
        page: 1,
      })
    );
  });

  test("clears filters when requested", async () => {
    const clearedResults = {
      ...mockPaginatedData,
      filters: { levelFilter: "ALL", searchTerm: "" },
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(clearedResults);
    const logReader = new LogReaderService();
    const result = await logReader.getPaginatedEntries({
      levelFilter: "ALL",
      searchTerm: "",
      page: 0,
    });
    expect(result.filters.levelFilter).toBe("ALL");
    expect(result.filters.searchTerm).toBe("");
    // Strengthened: clearing filters restores the full 4-entry fixture set,
    // not just the empty filter state.
    expect(result.entries).toHaveLength(4);
  });
});
describe("Performance Considerations", () => {
  test("handles large result sets efficiently", async () => {
    // Simulate a 1000-entry log split across 50 pages of 20 entries each.
    const bulkPagination = {
      ...mockPaginatedData.pagination,
      totalEntries: 1000,
      totalPages: 50,
      pageSize: 20,
    };
    const bulkResults = {
      ...mockPaginatedData,
      pagination: bulkPagination,
    };
    mockLogReader.getPaginatedEntries.mockResolvedValue(bulkResults);

    const reader = new LogReaderService();
    const page = await reader.getPaginatedEntries({
      searchTerm: "update",
      pageSize: 20,
    });

    expect(page.pagination.totalEntries).toBe(1000);
    expect(page.pagination.totalPages).toBe(50);
  });

  test("limits results per page appropriately", async () => {
    // The requested page size must be forwarded to the service unchanged.
    const reader = new LogReaderService();
    await reader.getPaginatedEntries({ pageSize: 5 });

    expect(mockLogReader.getPaginatedEntries).toHaveBeenCalledWith(
      expect.objectContaining({ pageSize: 5 })
    );
  });
});
});