import { describe, it, expect } from "vitest";
import { ChunkCache } from "./ChunkCache";

// Unit tests for ChunkCache: a fixed-capacity map keyed by chunk hash that
// behaves as a circular buffer — once `maxSize` entries exist, each new
// insertion overwrites the slot at `index` (the oldest surviving entry).
describe("ChunkCache", () => {
  describe("basic operations", () => {
    it("should create a cache with specified max size", () => {
      const cache = new ChunkCache(5);
      expect(cache.maxSize).toBe(5);
      expect(cache.index).toBe(0);
    });

    it("should add and retrieve chunks", () => {
      const cache = new ChunkCache(5);

      cache.addChunkToCache("hash1", 100, 10, null);
      cache.addChunkToCache("hash2", 200, 20, null);

      const chunk1 = cache.getChunk("hash1", null);
      const chunk2 = cache.getChunk("hash2", null);

      expect(chunk1).toEqual({ xorbIndex: 100, chunkIndex: 10 });
      expect(chunk2).toEqual({ xorbIndex: 200, chunkIndex: 20 });
    });

    it("should return undefined for non-existent chunks", () => {
      const cache = new ChunkCache(5);

      const chunk = cache.getChunk("nonexistent", null);
      expect(chunk).toBeUndefined();
    });

    it("should remove chunks from cache", () => {
      const cache = new ChunkCache(5);

      cache.addChunkToCache("hash1", 100, 10, null);
      expect(cache.getChunk("hash1", null)).toBeDefined();

      cache.removeChunkFromCache("hash1");
      expect(cache.getChunk("hash1", null)).toBeUndefined();
    });
  });

  describe("duplicate handling", () => {
    it("should ignore duplicate hashes", () => {
      const cache = new ChunkCache(5);

      // Add initial chunk
      cache.addChunkToCache("hash1", 100, 10, null);
      expect(cache.index).toBe(1);
      expect(cache.map.size).toBe(1);

      // Try to add same hash again - should be ignored
      cache.addChunkToCache("hash1", 999, 99, null);
      expect(cache.index).toBe(1); // index should not increment
      expect(cache.map.size).toBe(1); // map size should not increase

      // Original data should be preserved
      const chunk = cache.getChunk("hash1", null);
      expect(chunk).toEqual({ xorbIndex: 100, chunkIndex: 10 });
    });

    it("should maintain consistency when adding duplicates mixed with new hashes", () => {
      const cache = new ChunkCache(5);

      // Add some chunks
      cache.addChunkToCache("hash1", 100, 10, null);
      cache.addChunkToCache("hash2", 200, 20, null);
      cache.addChunkToCache("hash3", 300, 30, null);

      expect(cache.index).toBe(3);
      expect(cache.map.size).toBe(3);

      // Try to add duplicates
      cache.addChunkToCache("hash1", 999, 99, null); // duplicate
      cache.addChunkToCache("hash4", 400, 40, null); // new
      cache.addChunkToCache("hash2", 888, 88, null); // duplicate

      expect(cache.index).toBe(4); // only incremented for hash4
      expect(cache.map.size).toBe(4);

      // Verify all chunks are accessible and have correct data
      expect(cache.getChunk("hash1", null)).toEqual({ xorbIndex: 100, chunkIndex: 10 });
      expect(cache.getChunk("hash2", null)).toEqual({ xorbIndex: 200, chunkIndex: 20 });
      expect(cache.getChunk("hash3", null)).toEqual({ xorbIndex: 300, chunkIndex: 30 });
      expect(cache.getChunk("hash4", null)).toEqual({ xorbIndex: 400, chunkIndex: 40 });
    });
  });

  describe("cache overflow and circular buffer behavior", () => {
    it("should handle cache overflow correctly", () => {
      const cache = new ChunkCache(3); // Small cache size

      // Fill the cache to capacity
      cache.addChunkToCache("hash1", 100, 10, null);
      cache.addChunkToCache("hash2", 200, 20, null);
      cache.addChunkToCache("hash3", 300, 30, null);

      expect(cache.index).toBe(0); // wrapped around (3 % 3 = 0)
      expect(cache.map.size).toBe(3);

      // All chunks should be accessible
      expect(cache.getChunk("hash1", null)).toBeDefined();
      expect(cache.getChunk("hash2", null)).toBeDefined();
      expect(cache.getChunk("hash3", null)).toBeDefined();

      // Add one more chunk - should evict the oldest (hash1)
      cache.addChunkToCache("hash4", 400, 40, null);

      expect(cache.index).toBe(1); // wrapped around (4 % 3 = 1)
      expect(cache.map.size).toBe(3); // size should remain the same

      // hash1 should be evicted, others should remain
      expect(cache.getChunk("hash1", null)).toBeUndefined();
      expect(cache.getChunk("hash2", null)).toEqual({ xorbIndex: 200, chunkIndex: 20 });
      expect(cache.getChunk("hash3", null)).toEqual({ xorbIndex: 300, chunkIndex: 30 });
      expect(cache.getChunk("hash4", null)).toEqual({ xorbIndex: 400, chunkIndex: 40 });
    });

    it("should continue evicting oldest entries as new ones are added", () => {
      const cache = new ChunkCache(3);

      // Fill cache
      cache.addChunkToCache("hash1", 100, 10, null);
      cache.addChunkToCache("hash2", 200, 20, null);
      cache.addChunkToCache("hash3", 300, 30, null);

      // Add more chunks to test multiple evictions
      cache.addChunkToCache("hash4", 400, 40, null); // evicts hash1
      cache.addChunkToCache("hash5", 500, 50, null); // evicts hash2
      cache.addChunkToCache("hash6", 600, 60, null); // evicts hash3

      expect(cache.map.size).toBe(3);

      // Only the last 3 should remain
      expect(cache.getChunk("hash1", null)).toBeUndefined();
      expect(cache.getChunk("hash2", null)).toBeUndefined();
      expect(cache.getChunk("hash3", null)).toBeUndefined();
      expect(cache.getChunk("hash4", null)).toEqual({ xorbIndex: 400, chunkIndex: 40 });
      expect(cache.getChunk("hash5", null)).toEqual({ xorbIndex: 500, chunkIndex: 50 });
      expect(cache.getChunk("hash6", null)).toEqual({ xorbIndex: 600, chunkIndex: 60 });
    });

    it("should handle removals during overflow correctly", () => {
      const cache = new ChunkCache(3);

      // Fill cache
      cache.addChunkToCache("hash1", 100, 10, null);
      cache.addChunkToCache("hash2", 200, 20, null);
      cache.addChunkToCache("hash3", 300, 30, null);

      // Remove middle element
      cache.removeChunkFromCache("hash2");
      expect(cache.map.size).toBe(2);

      // Add new elements
      cache.addChunkToCache("hash4", 400, 40, null);
      cache.addChunkToCache("hash5", 500, 50, null);

      // The removal should not affect the eviction logic
      expect(cache.getChunk("hash1", null)).toBeUndefined(); // evicted
      expect(cache.getChunk("hash2", null)).toBeUndefined(); // removed
      expect(cache.getChunk("hash3", null)).toEqual({ xorbIndex: 300, chunkIndex: 30 });
      expect(cache.getChunk("hash4", null)).toEqual({ xorbIndex: 400, chunkIndex: 40 });
      expect(cache.getChunk("hash5", null)).toEqual({ xorbIndex: 500, chunkIndex: 50 });
    });
  });

  describe("consistency after operations", () => {
    it("should maintain consistent state after mixed operations", () => {
      const cache = new ChunkCache(4);

      // Add initial chunks
      cache.addChunkToCache("a", 1, 10, null);
      cache.addChunkToCache("b", 2, 20, null);
      cache.addChunkToCache("c", 3, 30, null);

      // Mix of operations
      cache.addChunkToCache("a", 999, 999, null); // duplicate (ignored)
      cache.removeChunkFromCache("b"); // removal
      cache.addChunkToCache("d", 4, 40, null); // new addition
      cache.addChunkToCache("e", 5, 50, null); // new addition - this triggers overflow
      cache.addChunkToCache("c", 888, 888, null); // duplicate (ignored)

      // Verify final state
      // With cache size 4: a(0), b(1, removed), c(2), d(3), e(4 -> 0, wraps and evicts a)
      expect(cache.getChunk("a", null)).toBeUndefined(); // evicted by e
      expect(cache.getChunk("b", null)).toBeUndefined(); // removed
      expect(cache.getChunk("c", null)).toEqual({ xorbIndex: 3, chunkIndex: 30 });
      expect(cache.getChunk("d", null)).toEqual({ xorbIndex: 4, chunkIndex: 40 });
      expect(cache.getChunk("e", null)).toEqual({ xorbIndex: 5, chunkIndex: 50 });

      // Map size should be 3 (a evicted, b removed)
      expect(cache.map.size).toBe(3);
    });

    it("should maintain consistency after cache overflow with mixed operations", () => {
      const cache = new ChunkCache(3);

      // Fill cache
      cache.addChunkToCache("first", 1, 1, null);
      cache.addChunkToCache("second", 2, 2, null);
      cache.addChunkToCache("third", 3, 3, null);

      // Cause overflow with duplicates and removals mixed in
      cache.addChunkToCache("fourth", 4, 4, null); // evicts "first"
      cache.addChunkToCache("second", 999, 999, null); // duplicate (ignored)
      cache.removeChunkFromCache("third"); // removal
      cache.addChunkToCache("fifth", 5, 5, null); // evicts "second"

      // Final state verification
      expect(cache.getChunk("first", null)).toBeUndefined(); // evicted
      expect(cache.getChunk("second", null)).toBeUndefined(); // evicted
      expect(cache.getChunk("third", null)).toBeUndefined(); // removed
      expect(cache.getChunk("fourth", null)).toEqual({ xorbIndex: 4, chunkIndex: 4 });
      expect(cache.getChunk("fifth", null)).toEqual({ xorbIndex: 5, chunkIndex: 5 });

      cache.addChunkToCache("sixth", 6, 6, null); // new, takes "third" place
      expect(cache.getChunk("sixth", null)).toEqual({ xorbIndex: 6, chunkIndex: 6 });
      expect(cache.getChunk("fourth", null)).toEqual({ xorbIndex: 4, chunkIndex: 4 });
      expect(cache.getChunk("fifth", null)).toEqual({ xorbIndex: 5, chunkIndex: 5 });

      cache.addChunkToCache("seventh", 7, 7, null); // new, takes "fourth" place
      expect(cache.getChunk("seventh", null)).toEqual({ xorbIndex: 7, chunkIndex: 7 });
      expect(cache.getChunk("fourth", null)).toBeUndefined(); // evicted
      expect(cache.getChunk("fifth", null)).toEqual({ xorbIndex: 5, chunkIndex: 5 });

      expect(cache.map.size).toBe(3);
    });
  });

  describe("negative xorbIndex handling", () => {
    it("should handle negative xorbIndex values (remote xorbs)", () => {
      const cache = new ChunkCache(5);

      cache.addChunkToCache("remote1", -1, 10, null);
      cache.addChunkToCache("local1", 1, 20, null);
      cache.addChunkToCache("remote2", -100, 30, null);

      expect(cache.getChunk("remote1", null)).toEqual({ xorbIndex: -1, chunkIndex: 10 });
      expect(cache.getChunk("local1", null)).toEqual({ xorbIndex: 1, chunkIndex: 20 });
      expect(cache.getChunk("remote2", null)).toEqual({ xorbIndex: -100, chunkIndex: 30 });
    });
  });

  describe("edge cases", () => {
    it("should handle cache with max size of 1", () => {
      const cache = new ChunkCache(1);

      cache.addChunkToCache("hash1", 1, 1, null);
      expect(cache.getChunk("hash1", null)).toBeDefined();

      cache.addChunkToCache("hash2", 2, 2, null);
      expect(cache.getChunk("hash1", null)).toBeUndefined(); // evicted
      expect(cache.getChunk("hash2", null)).toEqual({ xorbIndex: 2, chunkIndex: 2 });
    });

    it("should handle empty cache operations", () => {
      const cache = new ChunkCache(5);

      expect(cache.getChunk("nonexistent", null)).toBeUndefined();
      cache.removeChunkFromCache("nonexistent"); // should not throw
      expect(cache.map.size).toBe(0);
    });
  });
});