/**
 * Enhanced ScheduleService Tests
 * Tests for data persistence, state management, and concurrent access
 * Requirements: 5.1, 5.4, 5.6
 */

const fs = require("fs");
const path = require("path");
const ScheduleService = require("../../../src/tui/services/ScheduleService");

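// NOTE: These tests exercise the real filesystem. They create
// test-schedules.json along with its .lock and .backup siblings in the
// working directory, so the suite assumes nothing else touches those paths
// while it runs.
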
describe("ScheduleService Enhanced Features", () => {
  let scheduleService;
  const testSchedulesFile = "test-schedules.json";
  const testLockFile = "test-schedules.json.lock";

  beforeEach(() => {
    // Create service with test file
    scheduleService = new ScheduleService();
    scheduleService.schedulesFile = testSchedulesFile;
    scheduleService.lockFile = testLockFile;

    // Clean up any existing test files
    [testSchedulesFile, testLockFile, `${testSchedulesFile}.backup`].forEach(
      (file) => {
        try {
          fs.unlinkSync(file);
        } catch (error) {
          // File doesn't exist, which is fine
        }
      }
    );

    // Leftover temp files embed the timestamp of the run that created them,
    // so they have to be matched by prefix rather than by exact name
    fs.readdirSync(".")
      .filter((file) => file.startsWith(`${testSchedulesFile}.tmp.`))
      .forEach((file) => {
        try {
          fs.unlinkSync(file);
        } catch (error) {
          // Already removed, which is fine
        }
      });
  });

  afterEach(async () => {
    // Cleanup
    await scheduleService.cleanup();

    // Remove test files
    [testSchedulesFile, testLockFile, `${testSchedulesFile}.backup`].forEach(
      (file) => {
        try {
          fs.unlinkSync(file);
        } catch (error) {
          // File doesn't exist, which is fine
        }
      }
    );
  });

describe("Data Persistence", () => {
|
|
test("should save schedules with metadata and checksum", async () => {
|
|
const testSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(), // Tomorrow
|
|
recurrence: "once",
|
|
description: "Test schedule",
|
|
enabled: true,
|
|
};
|
|
|
|
const savedSchedule = await scheduleService.addSchedule(testSchedule);
|
|
expect(savedSchedule.id).toBeDefined();
|
|
|
|
// Check file structure
|
|
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
|
const parsedData = JSON.parse(fileContent);
|
|
|
|
expect(parsedData.version).toBe("1.0");
|
|
expect(parsedData.lastModified).toBeDefined();
|
|
expect(parsedData.schedules).toHaveLength(1);
|
|
expect(parsedData.metadata.totalSchedules).toBe(1);
|
|
expect(parsedData.metadata.checksum).toBeDefined();
|
|
});
|
|
|
|
test("should create backup before saving", async () => {
|
|
// Create initial schedule
|
|
const schedule1 = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "once",
|
|
description: "First schedule",
|
|
};
|
|
|
|
await scheduleService.addSchedule(schedule1);
|
|
|
|
// Add another schedule (should create backup)
|
|
const schedule2 = {
|
|
operationType: "rollback",
|
|
scheduledTime: new Date(Date.now() + 172800000).toISOString(),
|
|
recurrence: "once",
|
|
description: "Second schedule",
|
|
};
|
|
|
|
await scheduleService.addSchedule(schedule2);
|
|
|
|
// Check that backup exists
|
|
expect(fs.existsSync(`${testSchedulesFile}.backup`)).toBe(true);
|
|
});
|
|
|
|
test("should verify data integrity with checksum", async () => {
|
|
const testSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "daily",
|
|
description: "Integrity test",
|
|
};
|
|
|
|
await scheduleService.addSchedule(testSchedule);
|
|
|
|
// Manually corrupt the file
|
|
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
|
const parsedData = JSON.parse(fileContent);
|
|
|
|
// Change checksum to simulate corruption
|
|
parsedData.metadata.checksum = "invalid-checksum";
|
|
fs.writeFileSync(testSchedulesFile, JSON.stringify(parsedData, null, 2));
|
|
|
|
// Loading should detect corruption
|
|
const newService = new ScheduleService();
|
|
newService.schedulesFile = testSchedulesFile;
|
|
|
|
await expect(newService.loadSchedules()).rejects.toThrow();
|
|
});
|
|
});
|
|
|
|
describe("File Locking", () => {
|
|
test("should acquire and release file locks", async () => {
|
|
await scheduleService.acquireFileLock();
|
|
expect(fs.existsSync(testLockFile)).toBe(true);
|
|
|
|
await scheduleService.releaseFileLock();
|
|
expect(fs.existsSync(testLockFile)).toBe(false);
|
|
});
|
|
|
|
test("should handle concurrent access attempts", async () => {
|
|
// Simulate concurrent access
|
|
const service1 = new ScheduleService();
|
|
const service2 = new ScheduleService();
|
|
service1.schedulesFile = testSchedulesFile;
|
|
service1.lockFile = testLockFile;
|
|
service2.schedulesFile = testSchedulesFile;
|
|
service2.lockFile = testLockFile;
|
|
|
|
// First service acquires lock
|
|
await service1.acquireFileLock();
|
|
|
|
// Second service should fail to acquire lock
|
|
await expect(service2.acquireFileLock()).rejects.toThrow(
|
|
/Failed to acquire file lock/
|
|
);
|
|
|
|
// Release first lock
|
|
await service1.releaseFileLock();
|
|
|
|
// Now second service should be able to acquire lock
|
|
await expect(service2.acquireFileLock()).resolves.not.toThrow();
|
|
await service2.releaseFileLock();
|
|
});
|
|
|
|
test("should handle stale lock files", async () => {
|
|
// Create a stale lock file
|
|
const staleLockData = {
|
|
pid: 99999,
|
|
timestamp: new Date(Date.now() - 10000).toISOString(), // 10 seconds ago
|
|
operation: "test",
|
|
};
|
|
fs.writeFileSync(testLockFile, JSON.stringify(staleLockData));
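      // Assumption about the implementation: the lock above is treated as
      // stale either because its timestamp exceeds the service's staleness
      // threshold or because PID 99999 does not belong to a running process.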
      // Should be able to acquire lock by removing stale lock
      await expect(scheduleService.acquireFileLock()).resolves.not.toThrow();
      await scheduleService.releaseFileLock();
    });
  });

describe("Data Validation", () => {
|
|
test("should validate schedule data comprehensively", () => {
|
|
const validSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "weekly",
|
|
description: "Valid schedule",
|
|
enabled: true,
|
|
};
|
|
|
|
expect(() =>
|
|
scheduleService.validateScheduleData(validSchedule)
|
|
).not.toThrow();
|
|
});
|
|
|
|
test("should reject invalid operation types", () => {
|
|
const invalidSchedule = {
|
|
operationType: "invalid",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "once",
|
|
};
|
|
|
|
expect(() =>
|
|
scheduleService.validateScheduleData(invalidSchedule)
|
|
).toThrow(/must be one of: update, rollback/);
|
|
});
|
|
|
|
test("should reject past dates", () => {
|
|
const pastSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() - 86400000).toISOString(), // Yesterday
|
|
recurrence: "once",
|
|
};
|
|
|
|
expect(() => scheduleService.validateScheduleData(pastSchedule)).toThrow(
|
|
/must be in the future/
|
|
);
|
|
});
|
|
|
|
test("should validate description length", () => {
|
|
const longDescription = "x".repeat(501); // Exceeds 500 char limit
|
|
const invalidSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "once",
|
|
description: longDescription,
|
|
};
|
|
|
|
expect(() =>
|
|
scheduleService.validateScheduleData(invalidSchedule)
|
|
).toThrow(/must not exceed 500 characters/);
|
|
});
|
|
|
|
test("should prevent rollback operations from being recurring", () => {
|
|
const invalidSchedule = {
|
|
operationType: "rollback",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "daily", // Rollbacks should only be 'once'
|
|
};
|
|
|
|
expect(() =>
|
|
scheduleService.validateScheduleData(invalidSchedule)
|
|
).toThrow(/Rollback operations can only be scheduled once/);
|
|
});
|
|
});
|
|
|
|
describe("Error Recovery", () => {
|
|
test("should recover from corrupted files using backup", async () => {
|
|
// Create valid schedule first
|
|
const validSchedule = {
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "once",
|
|
description: "Recovery test",
|
|
};
|
|
|
|
await scheduleService.addSchedule(validSchedule);
|
|
|
|
// Corrupt the main file
|
|
fs.writeFileSync(testSchedulesFile, "invalid json content");
|
|
|
|
// Recovery should work
|
|
const recovered = await scheduleService.recoverFromCorruption();
|
|
expect(Array.isArray(recovered)).toBe(true);
|
|
});
|
|
|
|
test("should create empty file when no recovery possible", async () => {
|
|
// Create corrupted file with no backup
|
|
fs.writeFileSync(testSchedulesFile, "completely invalid");
|
|
|
|
const recovered = await scheduleService.recoverFromCorruption();
|
|
expect(recovered).toEqual([]);
|
|
|
|
// Should create new empty file
|
|
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
|
const parsedData = JSON.parse(fileContent);
|
|
expect(parsedData.schedules).toEqual([]);
|
|
});
|
|
});
|
|
|
|
describe("State Management", () => {
|
|
test("should cleanup resources properly", async () => {
|
|
// Add some schedules and create locks
|
|
await scheduleService.addSchedule({
|
|
operationType: "update",
|
|
scheduledTime: new Date(Date.now() + 86400000).toISOString(),
|
|
recurrence: "once",
|
|
});
|
|
|
|
await scheduleService.acquireFileLock();
|
|
|
|
// Cleanup should clear everything
|
|
scheduleService.cleanup();
|
|
|
|
expect(scheduleService.persistenceQueue).toEqual([]);
|
|
expect(scheduleService.isProcessingQueue).toBe(false);
|
|
expect(scheduleService.isLoaded).toBe(false);
|
|
});
|
|
|
|
test("should provide system state validation", async () => {
|
|
const report = await scheduleService.validateSystemState();
|
|
|
|
expect(report).toHaveProperty("fileExists");
|
|
expect(report).toHaveProperty("fileReadable");
|
|
expect(report).toHaveProperty("fileWritable");
|
|
expect(report).toHaveProperty("dataValid");
|
|
expect(report).toHaveProperty("issues");
|
|
expect(report).toHaveProperty("recommendations");
|
|
});
|
|
|
|
test("should provide service statistics", () => {
|
|
const stats = scheduleService.getServiceStats();
|
|
|
|
expect(stats).toHaveProperty("schedulesLoaded");
|
|
expect(stats).toHaveProperty("schedulesCount");
|
|
expect(stats).toHaveProperty("activeSchedules");
|
|
expect(stats).toHaveProperty("pendingOperations");
|
|
expect(stats).toHaveProperty("memoryUsage");
|
|
});
|
|
});
|
|
|
|
describe("Atomic Operations", () => {
|
|
test("should queue multiple save operations", async () => {
|
|
const promises = [];
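      // Back-to-back addSchedule calls are presumably serialized through the
      // service's internal persistenceQueue (see the State Management tests),
      // so none of these writes should clobber another.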
      // Queue multiple operations simultaneously
      for (let i = 0; i < 5; i++) {
        const schedule = {
          operationType: "update",
          scheduledTime: new Date(
            Date.now() + 86400000 + i * 1000
          ).toISOString(),
          recurrence: "once",
          description: `Schedule ${i}`,
        };
        promises.push(scheduleService.addSchedule(schedule));
      }

      // All should complete successfully
      const results = await Promise.all(promises);
      expect(results).toHaveLength(5);

      // All should have unique IDs
      const ids = results.map((r) => r.id);
      const uniqueIds = new Set(ids);
      expect(uniqueIds.size).toBe(5);
    });

test("should maintain data consistency during concurrent operations", async () => {
|
|
const operations = [];
|
|
|
|
// Create multiple concurrent add/update/delete operations
|
|
for (let i = 0; i < 3; i++) {
|
|
operations.push(
|
|
scheduleService.addSchedule({
|
|
operationType: "update",
|
|
scheduledTime: new Date(
|
|
Date.now() + 86400000 + i * 1000
|
|
).toISOString(),
|
|
recurrence: "once",
|
|
description: `Concurrent ${i}`,
|
|
})
|
|
);
|
|
}
|
|
|
|
const schedules = await Promise.all(operations);
|
|
|
|
// Verify all schedules were saved
|
|
const allSchedules = await scheduleService.getAllSchedules();
|
|
expect(allSchedules).toHaveLength(3);
|
|
|
|
// Verify data integrity
|
|
const fileContent = fs.readFileSync(testSchedulesFile, "utf8");
|
|
const parsedData = JSON.parse(fileContent);
|
|
expect(parsedData.metadata.totalSchedules).toBe(3);
|
|
});
|
|
});
|
|
});
|