// Jest unit tests for LogService (log discovery, reading, parsing, filtering, pagination).
const fs = require("fs").promises;
// NOTE(review): "path" appears unused in this test file — confirm before removing.
const path = require("path");
const LogService = require("../../src/services/LogService");

// Mock fs module so the tests never touch the real filesystem. Only the
// three fs.promises methods LogService calls are stubbed; each test
// programs their resolved/rejected values as needed.
jest.mock("fs", () => ({
  promises: {
    readdir: jest.fn(),
    stat: jest.fn(),
    readFile: jest.fn(),
  },
}));
// Test suite for LogService. A fresh service instance and a canned Markdown
// progress log are built in beforeEach; the log contains three operations
// (one update with a success and a failure, one rollback, one scheduled
// update) plus a trailing error-analysis section, exercising every parser
// branch the assertions below rely on.
describe("LogService", () => {
  let logService;
  let mockLogContent;

  beforeEach(() => {
    jest.clearAllMocks();
    logService = new LogService();

    // Mock comprehensive log content: update op (1 success + 1 failure),
    // rollback op, scheduled op, then a standalone error-analysis block.
    mockLogContent = `# Shopify Price Update Progress Log

This file tracks the progress of price update operations.

---

## Recent Operations

## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Price Adjustment: -10%
- Started: 2025-08-06 20:30:39 UTC

**Progress:**
- ✅ **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $600 → $540
- Compare At Price: $600
- Updated: 2025-08-06 20:30:40 UTC
- ❌ **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Error: Rate limit exceeded
- Failed: 2025-08-06 20:30:41 UTC

**Summary:**
- Total Products Processed: 2
- Successful Updates: 1
- Failed Updates: 1
- Duration: 2 seconds
- Completed: 2025-08-06 20:30:42 UTC

---

## Price Rollback Operation - 2025-08-06 20:31:06 UTC

**Configuration:**
- Target Tag: Collection-Snowboard
- Operation Mode: rollback
- Started: 2025-08-06 20:31:06 UTC

**Progress:**
- 🔄 **The Collection Snowboard: Hydrogen** (gid://shopify/Product/8116504625443)
- Variant: gid://shopify/ProductVariant/44236769263907
- Price: $540 → $600 (from Compare At: $600)
- Rolled back: 2025-08-06 20:31:07 UTC

**Rollback Summary:**
- Total Products Processed: 1
- Total Variants Processed: 1
- Eligible Variants: 1
- Successful Rollbacks: 1
- Failed Rollbacks: 0
- Skipped Variants: 0 (no compare-at price)
- Duration: 1 seconds
- Completed: 2025-08-06 20:31:07 UTC

---

## Scheduled Update Operation - 2025-08-06 21:00:00 UTC

**Configuration:**
- Target Tag: Sale-Items
- Price Adjustment: -20%
- Scheduled: true
- Started: 2025-08-06 21:00:00 UTC

**Progress:**

**Summary:**
- Total Products Processed: 0
- Successful Updates: 0
- Failed Updates: 0
- Duration: 0 seconds
- Completed: 2025-08-06 21:00:00 UTC

---

**Error Analysis - 2025-08-06 20:31:10 UTC**

**Error Summary by Category:**
- Rate Limiting: 1 error

**Detailed Error Log:**
1. **Failed Product** (gid://shopify/Product/failed123)
- Variant: gid://shopify/ProductVariant/failed456
- Category: Rate Limiting
- Error: Rate limit exceeded (429)
`;
  });

  // File discovery: name filtering, per-file metadata, operation counting,
  // sort order, and tolerance of unreadable files/directories.
  describe("getLogFiles()", () => {
    test("discovers available log files successfully", async () => {
      const mockFiles = [
        "Progress.md",
        "backup-log.md",
        "other.txt",
        "test-Progress.md",
      ];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      // Discovery scans the current working directory.
      expect(fs.readdir).toHaveBeenCalledWith(".");
      // "other.txt" is excluded: only .md files with Progress or log in the name.
      expect(logFiles).toHaveLength(3); // Only .md files with Progress or log
      expect(logFiles[0]).toMatchObject({
        filename: expect.any(String),
        path: expect.any(String),
        size: 1024,
        createdAt: expect.any(Date),
        modifiedAt: expect.any(Date),
        operationCount: expect.any(Number),
        isMainLog: expect.any(Boolean),
      });
    });

    test("identifies main log file correctly", async () => {
      const mockFiles = ["Progress.md", "backup-log.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      // Exactly one file should carry the isMainLog flag.
      const mainLog = logFiles.find((f) => f.isMainLog);
      const backupLog = logFiles.find((f) => !f.isMainLog);

      expect(mainLog.filename).toBe("Progress.md");
      expect(backupLog.filename).toBe("backup-log.md");
    });

    test("counts operations in log files correctly", async () => {
      const mockFiles = ["Progress.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      fs.readFile.mockResolvedValue(mockLogContent);

      const logFiles = await logService.getLogFiles();

      // update + rollback + scheduled sections in the fixture.
      expect(logFiles[0].operationCount).toBe(3); // Three operations in mock content
    });

    test("sorts log files by modification time (newest first)", async () => {
      const mockFiles = ["old-log.md", "new-log.md"];
      const oldStats = {
        size: 512,
        birthtime: new Date("2025-08-05T20:00:00Z"),
        mtime: new Date("2025-08-05T20:30:00Z"),
      };
      const newStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      // Per-file stats in readdir order: old-log.md first, then new-log.md.
      fs.stat.mockResolvedValueOnce(oldStats).mockResolvedValueOnce(newStats);
      fs.readFile.mockResolvedValue(
        "## Test Operation - 2025-08-06 20:00:00 UTC"
      );

      const logFiles = await logService.getLogFiles();

      expect(logFiles[0].filename).toBe("new-log.md");
      expect(logFiles[1].filename).toBe("old-log.md");
    });

    test("handles directory read errors", async () => {
      fs.readdir.mockRejectedValue(new Error("Permission denied"));

      // The service wraps the underlying fs error in a descriptive message.
      await expect(logService.getLogFiles()).rejects.toThrow(
        "Failed to discover log files: Permission denied"
      );
    });

    test("skips files that cannot be read", async () => {
      const mockFiles = ["Progress.md", "corrupted-log.md"];
      const mockStats = {
        size: 1024,
        birthtime: new Date("2025-08-06T20:00:00Z"),
        mtime: new Date("2025-08-06T20:30:00Z"),
      };

      fs.readdir.mockResolvedValue(mockFiles);
      fs.stat.mockResolvedValue(mockStats);
      // First file reads fine, second read fails — the failure must be
      // swallowed per-file rather than aborting the whole discovery.
      fs.readFile
        .mockResolvedValueOnce(mockLogContent)
        .mockRejectedValueOnce(new Error("File corrupted"));

      const logFiles = await logService.getLogFiles();

      expect(logFiles).toHaveLength(1);
      expect(logFiles[0].filename).toBe("Progress.md");
    });
  });

  // Raw file reading: default filename, explicit/absolute paths, and
  // error translation (ENOENT vs. other fs errors).
  describe("readLogFile()", () => {
    test("reads Progress.md content by default", async () => {
      fs.readFile.mockResolvedValue(mockLogContent);

      const content = await logService.readLogFile();

      expect(fs.readFile).toHaveBeenCalledWith("Progress.md", "utf8");
      expect(content).toBe(mockLogContent);
    });

    test("reads specified log file", async () => {
      const customContent = "# Custom Log Content";
      fs.readFile.mockResolvedValue(customContent);

      const content = await logService.readLogFile("custom-log.md");

      expect(fs.readFile).toHaveBeenCalledWith("custom-log.md", "utf8");
      expect(content).toBe(customContent);
    });

    test("handles absolute file paths", async () => {
      const absolutePath = "/absolute/path/to/log.md";
      fs.readFile.mockResolvedValue(mockLogContent);

      await logService.readLogFile(absolutePath);

      // The path must be passed through unmodified (no joining/normalizing).
      expect(fs.readFile).toHaveBeenCalledWith(absolutePath, "utf8");
    });

    test("throws error when file not found", async () => {
      const error = new Error("File not found");
      error.code = "ENOENT";
      fs.readFile.mockRejectedValue(error);

      // ENOENT gets a dedicated, user-friendly message.
      await expect(logService.readLogFile("nonexistent.md")).rejects.toThrow(
        "Log file not found: nonexistent.md"
      );
    });

    test("throws error for other file system errors", async () => {
      const error = new Error("Permission denied");
      error.code = "EACCES";
      fs.readFile.mockRejectedValue(error);

      // Non-ENOENT errors fall through to the generic wrapper message.
      await expect(logService.readLogFile()).rejects.toThrow(
        "Failed to read log file: Permission denied"
      );
    });
  });

  // Markdown → structured entries: types, configuration, progress items,
  // summaries, status, ordering, and graceful handling of bad input.
  describe("parseLogContent()", () => {
    test("parses log content into structured entries", () => {
      const entries = logService.parseLogContent(mockLogContent);

      expect(entries).toHaveLength(3);
      expect(entries.every((entry) => entry.id)).toBe(true);
      expect(entries.every((entry) => entry.timestamp instanceof Date)).toBe(
        true
      );
    });

    test("identifies operation types correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      const rollbackOp = entries.find((e) => e.type === "rollback");
      const scheduledOp = entries.find((e) => e.type === "scheduled");

      expect(updateOp).toBeDefined();
      expect(rollbackOp).toBeDefined();
      expect(scheduledOp).toBeDefined();
    });

    test("parses configuration sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      // Configuration is keyed by the literal label text from the bullet list.
      expect(updateOp.configuration["Target Tag"]).toBe("Collection-Snowboard");
      expect(updateOp.configuration["Price Adjustment"]).toBe("-10%");
      expect(updateOp.details).toContain("Target Tag: Collection-Snowboard");
    });

    test("parses progress sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      expect(updateOp.progress).toHaveLength(2); // One success, one failure

      // ✅ bullets map to status "success", ❌ bullets to "failed".
      const successProgress = updateOp.progress.find(
        (p) => p.status === "success"
      );
      const failedProgress = updateOp.progress.find(
        (p) => p.status === "failed"
      );

      expect(successProgress.productTitle).toBe(
        "The Collection Snowboard: Hydrogen"
      );
      expect(failedProgress.productTitle).toBe("Failed Product");
    });

    test("parses summary sections correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      // Summary values stay as raw strings, not parsed numbers.
      expect(updateOp.summary["Total Products Processed"]).toBe("2");
      expect(updateOp.summary["Successful Updates"]).toBe("1");
      expect(updateOp.summary["Failed Updates"]).toBe("1");
    });

    test("determines operation status correctly", () => {
      const entries = logService.parseLogContent(mockLogContent);

      const updateOp = entries.find((e) => e.type === "update");
      const rollbackOp = entries.find((e) => e.type === "rollback");
      const scheduledOp = entries.find((e) => e.type === "scheduled");

      // Status is derived from the presence of a completed Summary section,
      // so even the update op — which logged one failed item — reports
      // "completed" rather than "failed".
      expect(updateOp.status).toBe("completed"); // Has summary
      expect(rollbackOp.status).toBe("completed"); // Has summary, no errors
      expect(scheduledOp.status).toBe("completed"); // Has summary
    });

    test("sorts entries by timestamp (newest first)", () => {
      const entries = logService.parseLogContent(mockLogContent);

      // Scheduled (21:00:00) should come first, then Rollback (20:31:06), then Update (20:30:39)
      expect(entries[0].type).toBe("scheduled");
      expect(entries[1].type).toBe("rollback");
      expect(entries[2].type).toBe("update");
    });

    test("handles malformed content gracefully", () => {
      const malformedContent = `
# Invalid Log
Random text without proper structure
## Invalid header without timestamp
- Some random line
`;

      const entries = logService.parseLogContent(malformedContent);

      // Must return an empty array, not throw.
      expect(Array.isArray(entries)).toBe(true);
      expect(entries).toHaveLength(0); // No valid operations found
    });

    test("handles invalid timestamps gracefully", () => {
      // Month 13, day 45, hour 99 — syntactically a timestamp, semantically not.
      const invalidTimestampContent = `
## Price Update Operation - 2025-13-45 99:99:99 UTC

**Configuration:**
- Target Tag: test

**Summary:**
- Total Products Processed: 0
`;

      const entries = logService.parseLogContent(invalidTimestampContent);

      // The entry is kept and some Date (e.g. a fallback) is assigned.
      expect(entries).toHaveLength(1);
      expect(entries[0].timestamp).toBeInstanceOf(Date);
    });
  });

  // Entry filtering by date range, type, status, and free-text search.
  describe("filterLogs()", () => {
    let sampleEntries;

    beforeEach(() => {
      sampleEntries = logService.parseLogContent(mockLogContent);
    });

    // NOTE(review): the three date-range tests assume the suite is NOT run
    // on/near 2025-08-06, since all fixture entries carry that date.
    test("filters by date range - today", () => {
      const today = new Date();
      const todayEntry = {
        ...sampleEntries[0],
        timestamp: today,
      };
      const testEntries = [todayEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "today",
      });

      expect(filtered).toHaveLength(1);
      expect(filtered[0].timestamp.toDateString()).toBe(today.toDateString());
    });

    test("filters by date range - yesterday", () => {
      const yesterday = new Date();
      yesterday.setDate(yesterday.getDate() - 1);
      const yesterdayEntry = {
        ...sampleEntries[0],
        timestamp: yesterday,
      };
      const testEntries = [yesterdayEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "yesterday",
      });

      expect(filtered).toHaveLength(1);
      expect(filtered[0].timestamp.toDateString()).toBe(
        yesterday.toDateString()
      );
    });

    test("filters by date range - week", () => {
      const weekAgo = new Date();
      weekAgo.setDate(weekAgo.getDate() - 3); // 3 days ago, within week
      const weekEntry = {
        ...sampleEntries[0],
        timestamp: weekAgo,
      };
      const testEntries = [weekEntry, ...sampleEntries];

      const filtered = logService.filterLogs(testEntries, {
        dateRange: "week",
      });

      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by operation type", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        operationType: "update",
      });

      expect(filtered.every((entry) => entry.type === "update")).toBe(true);
      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by status", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        status: "completed",
      });

      expect(filtered.every((entry) => entry.status === "completed")).toBe(
        true
      );
      expect(filtered.length).toBeGreaterThan(0);
    });

    test("filters by search term in title", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "Rollback",
      });

      expect(filtered.length).toBeGreaterThan(0);
      expect(filtered.some((entry) => entry.title.includes("Rollback"))).toBe(
        true
      );
    });

    test("filters by search term in configuration", () => {
      // The search term only appears in configuration values, not titles.
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "Collection-Snowboard",
      });

      expect(filtered.length).toBeGreaterThan(0);
    });

    test("combines multiple filters", () => {
      // All criteria are AND-ed together.
      const filtered = logService.filterLogs(sampleEntries, {
        operationType: "update",
        status: "completed",
        searchTerm: "Collection",
      });

      expect(filtered.every((entry) => entry.type === "update")).toBe(true);
      expect(filtered.every((entry) => entry.status === "completed")).toBe(
        true
      );
    });

    test("returns empty array for non-matching filters", () => {
      const filtered = logService.filterLogs(sampleEntries, {
        searchTerm: "nonexistent-term-xyz",
      });

      expect(filtered).toHaveLength(0);
    });

    test("returns all entries when no filters applied", () => {
      const filtered = logService.filterLogs(sampleEntries, {});

      expect(filtered).toHaveLength(sampleEntries.length);
    });
  });

  // Pagination: page slicing plus the derived metadata object. Note that
  // pages are 0-indexed while startIndex/endIndex are 1-based display values.
  describe("paginateLogs()", () => {
    let sampleEntries;

    beforeEach(() => {
      sampleEntries = logService.parseLogContent(mockLogContent);
    });

    test("paginates logs correctly - first page", () => {
      const result = logService.paginateLogs(sampleEntries, 0, 2);

      expect(result.entries).toHaveLength(2);
      expect(result.pagination.currentPage).toBe(0);
      expect(result.pagination.pageSize).toBe(2);
      expect(result.pagination.totalEntries).toBe(sampleEntries.length);
      expect(result.pagination.totalPages).toBe(
        Math.ceil(sampleEntries.length / 2)
      );
      expect(result.pagination.hasNextPage).toBe(true);
      expect(result.pagination.hasPreviousPage).toBe(false);
      // 1-based display indices: first page of size 2 shows items 1-2.
      expect(result.pagination.startIndex).toBe(1);
      expect(result.pagination.endIndex).toBe(2);
    });

    test("paginates logs correctly - last page", () => {
      const totalPages = Math.ceil(sampleEntries.length / 2);
      const lastPage = totalPages - 1;
      const result = logService.paginateLogs(sampleEntries, lastPage, 2);

      expect(result.pagination.currentPage).toBe(lastPage);
      expect(result.pagination.hasNextPage).toBe(false);
      expect(result.pagination.hasPreviousPage).toBe(true);
    });

    test("handles empty log array", () => {
      const result = logService.paginateLogs([], 0, 10);

      expect(result.entries).toHaveLength(0);
      expect(result.pagination.totalEntries).toBe(0);
      expect(result.pagination.totalPages).toBe(0);
      expect(result.pagination.hasNextPage).toBe(false);
      expect(result.pagination.hasPreviousPage).toBe(false);
    });

    test("uses default pagination parameters", () => {
      const result = logService.paginateLogs(sampleEntries);

      // Defaults: page 0, 20 entries per page.
      expect(result.pagination.currentPage).toBe(0);
      expect(result.pagination.pageSize).toBe(20);
    });

    test("handles page size larger than total entries", () => {
      const result = logService.paginateLogs(sampleEntries, 0, 100);

      expect(result.entries).toHaveLength(sampleEntries.length);
      expect(result.pagination.totalPages).toBe(1);
      expect(result.pagination.hasNextPage).toBe(false);
    });

    test("calculates pagination metadata correctly", () => {
      // Page 1 with size 1 → the second entry, shown as item 2 of N.
      const result = logService.paginateLogs(sampleEntries, 1, 1);

      expect(result.pagination.startIndex).toBe(2);
      expect(result.pagination.endIndex).toBe(2);
      expect(result.pagination.currentPage).toBe(1);
    });
  });

  // Direct coverage of underscore-prefixed helpers used by the parser.
  describe("Private Methods", () => {
    test("_parseOperationType identifies operation types correctly", () => {
      expect(logService._parseOperationType("Price Update Operation")).toBe(
        "update"
      );
      expect(logService._parseOperationType("Price Rollback Operation")).toBe(
        "rollback"
      );
      expect(logService._parseOperationType("Scheduled Update Operation")).toBe(
        "scheduled"
      );
      // Unrecognized headers fall back to "unknown".
      expect(logService._parseOperationType("Unknown Operation")).toBe(
        "unknown"
      );
    });

    test("_parseTimestamp handles various timestamp formats", () => {
      const timestamp1 = logService._parseTimestamp("2025-08-06 20:30:39 UTC");
      const timestamp2 = logService._parseTimestamp("invalid-timestamp");

      // The "YYYY-MM-DD HH:MM:SS UTC" log format maps to the ISO instant.
      expect(timestamp1).toEqual(new Date("2025-08-06T20:30:39Z"));
      // Unparseable input still yields some Date rather than throwing.
      expect(timestamp2).toBeInstanceOf(Date);
    });
  });

  // Degenerate-input behavior of the parser: empty, header-only, truncated.
  describe("Error Handling", () => {
    test("handles empty log content", () => {
      const entries = logService.parseLogContent("");

      expect(entries).toEqual([]);
    });

    test("handles log content with only headers", () => {
      const headerOnlyContent = `
# Shopify Price Update Progress Log
## Recent Operations
---
`;

      const entries = logService.parseLogContent(headerOnlyContent);

      expect(entries).toEqual([]);
    });

    test("handles partial operation entries", () => {
      // An operation with a Configuration section but no Summary (file
      // truncated mid-operation) must still produce an entry.
      const partialContent = `
## Price Update Operation - 2025-08-06 20:30:39 UTC

**Configuration:**
- Target Tag: test

# End of file
`;

      const entries = logService.parseLogContent(partialContent);

      expect(entries).toHaveLength(1);
      expect(entries[0].configuration["Target Tag"]).toBe("test");
    });
  });
});