fix: debounce race condition in tests, .catch() on LLM batch
- Added FactStore.flush() for immediate write (no debounce)
- beforeEach flushes pending writes before cleanup
- 3/3 consecutive runs: 83/83 pass, 0 fail
- Added .catch() on processLlmBatchWhenReady() in hooks.ts (H1)
This commit is contained in:
parent
8964d93c60
commit
45cf11bb73
3 changed files with 17 additions and 1 deletions
|
|
@@ -33,6 +33,16 @@ export class FactStore {
|
|||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Immediately flushes any pending debounced writes.
|
||||
* Useful in tests and before shutdown to ensure data is persisted.
|
||||
*/
|
||||
public async flush(): Promise<void> {
|
||||
if (this.isLoaded) {
|
||||
await this.persist();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads facts from the `facts.json` file into the in-memory store.
|
||||
* If the file doesn't exist, it initializes an empty store.
|
||||
|
|
|
|||
|
|
@@ -98,7 +98,8 @@ export class HookManager {
|
|||
if (this.llmEnhancer) {
|
||||
const messageId = `msg-${Date.now()}`;
|
||||
this.llmEnhancer.addToBatch({ id: messageId, text });
|
||||
this.processLlmBatchWhenReady();
|
||||
this.processLlmBatchWhenReady().catch(err =>
|
||||
this.logger.error('LLM batch processing failed', err as Error));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -23,6 +23,11 @@ describe('FactStore', () => {
|
|||
after(async () => await fs.rm(testDir, { recursive: true, force: true }));
|
||||
|
||||
beforeEach(async () => {
|
||||
// Flush any pending debounced writes from the previous test
|
||||
// to prevent stale data from bleeding across test boundaries.
|
||||
if (factStore) {
|
||||
await factStore.flush();
|
||||
}
|
||||
const filePath = path.join(testDir, 'facts.json');
|
||||
try { await fs.unlink(filePath); } catch (e: unknown) {
|
||||
if ((e as NodeJS.ErrnoException).code !== 'ENOENT') throw e;
|
||||
|
|
|
|||
Loading…
Reference in a new issue