Skip to content

Commit d8f3a48

Browse files
committed
feat(sync): enhance sync engine with logging and routing logic for improved operation tracking
1 parent 0325712 commit d8f3a48

7 files changed

Lines changed: 291 additions & 54 deletions

File tree

src/plugin/plugin.ts

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -217,18 +217,25 @@ function performInitialSync(pathConfig: PathConfig): void {
217217
void (async () => {
218218
try {
219219
log('Running initial sync in background...');
220-
const { categories } = await loadLocalData(pathConfig, config.sync);
220+
const loadStart = Date.now();
221+
const { categories, errors } = await loadLocalData(pathConfig, config.sync);
222+
log(`Loaded ${String(categories.length)} categories in ${String(Date.now() - loadStart)}ms`);
223+
if (errors.length > 0) log(`Load errors: ${String(errors.length)}`);
224+
225+
const syncStart = Date.now();
221226
const result = await engine.sync(categories);
227+
const dur = Date.now() - syncStart;
222228

223229
if (result.success && result.action !== 'error') {
224230
await persistLocalState(pathConfig);
225-
log(`Initial sync complete: ${result.message}`);
231+
log(`Initial sync complete in ${String(dur)}ms: ${result.message}`);
226232
} else {
227-
log(`Sync completed: ${result.message}`);
233+
log(`Sync completed in ${String(dur)}ms: ${result.message}`);
228234
}
229235
} catch (error) {
230236
const errMsg = error instanceof Error ? error.message : String(error);
231237
log(`WARNING: Initial sync failed: ${errMsg}`);
238+
if (error instanceof Error && error.stack) log(`Stack: ${error.stack}`);
232239
}
233240
})();
234241
}

src/storage/repo/fetch.ts

Lines changed: 70 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -4,26 +4,37 @@
44

55
import { RepoApiError, RepoRateLimitError } from './errors.js';
66

7+
/** Default timeout for API requests (30 seconds) */
8+
const DEFAULT_TIMEOUT_MS = 30000;
9+
710
/**
8-
* Fetch with exponential backoff retry.
11+
* Fetch with exponential backoff retry and timeout.
12+
* Handles rate limits by waiting until reset time.
913
*/
1014
export async function fetchWithRetry(
1115
url: string,
1216
options: RequestInit,
1317
maxRetries: number,
14-
retryDelayMs: number
18+
retryDelayMs: number,
19+
timeoutMs: number = DEFAULT_TIMEOUT_MS
1520
): Promise<Response> {
1621
let lastError: Error | undefined;
1722

1823
for (let attempt = 0; attempt <= maxRetries; attempt++) {
19-
const result = await attemptFetch(url, options);
24+
const result = await attemptFetch(url, options, timeoutMs);
2025
if (result.response) return result.response;
2126
if (result.shouldThrow && result.error) throw result.error;
2227
lastError = result.error;
2328

2429
if (attempt < maxRetries) {
25-
const delay = retryDelayMs * Math.pow(2, attempt);
26-
await sleep(delay);
30+
// For rate limit errors, wait until reset time (max 60s)
31+
if (lastError instanceof RepoRateLimitError) {
32+
const waitTime = Math.min(lastError.resetTimestamp * 1000 - Date.now(), 60000);
33+
if (waitTime > 0) await sleep(waitTime);
34+
} else {
35+
const delay = retryDelayMs * Math.pow(2, attempt);
36+
await sleep(delay);
37+
}
2738
}
2839
}
2940

@@ -36,32 +47,72 @@ interface FetchResult {
3647
shouldThrow?: boolean;
3748
}
3849

39-
async function attemptFetch(url: string, options: RequestInit): Promise<FetchResult> {
50+
async function attemptFetch(
51+
url: string,
52+
options: RequestInit,
53+
timeoutMs: number
54+
): Promise<FetchResult> {
4055
try {
41-
const response = await fetch(url, options);
42-
checkRateLimit(response);
56+
const controller = new AbortController();
57+
const timeoutId = setTimeout(() => {
58+
controller.abort();
59+
}, timeoutMs);
4360

44-
if (!response.ok) {
45-
const error = await createApiError(response);
46-
return { error, shouldThrow: isNonRetryableError(error) };
47-
}
61+
try {
62+
const response = await fetch(url, { ...options, signal: controller.signal });
63+
clearTimeout(timeoutId);
64+
checkRateLimit(response);
65+
66+
if (!response.ok) {
67+
const error = await createApiError(response);
68+
return { error, shouldThrow: isNonRetryableError(error) };
69+
}
4870

49-
return { response };
71+
return { response };
72+
} finally {
73+
clearTimeout(timeoutId);
74+
}
5075
} catch (error) {
5176
const err = error as Error;
77+
// Convert abort errors to timeout errors
78+
if (err.name === 'AbortError') {
79+
return {
80+
error: new Error(`Request timeout after ${String(timeoutMs)}ms`),
81+
shouldThrow: false,
82+
};
83+
}
5284
return { error: err, shouldThrow: isNonRetryableError(err) };
5385
}
5486
}
5587

5688
function checkRateLimit(response: Response): void {
57-
if (response.status !== 403) return;
89+
// Handle 429 Too Many Requests
90+
if (response.status === 429) {
91+
const retryAfter = response.headers.get('Retry-After');
92+
const resetTimestamp = retryAfter
93+
? Date.now() / 1000 + parseInt(retryAfter, 10)
94+
: Date.now() / 1000 + 60;
95+
throw new RepoRateLimitError(resetTimestamp);
96+
}
97+
98+
// Handle 403 with rate limit or abuse detection
99+
if (response.status === 403) {
100+
const remaining = response.headers.get('X-RateLimit-Remaining');
101+
const retryAfter = response.headers.get('Retry-After');
58102

59-
const remaining = response.headers.get('X-RateLimit-Remaining');
60-
if (remaining !== '0') return;
103+
// Secondary rate limit (abuse detection) includes Retry-After
104+
if (retryAfter) {
105+
const resetTimestamp = Date.now() / 1000 + parseInt(retryAfter, 10);
106+
throw new RepoRateLimitError(resetTimestamp);
107+
}
61108

62-
const resetTime = response.headers.get('X-RateLimit-Reset');
63-
const resetTimestamp = resetTime ? parseInt(resetTime, 10) : Date.now() / 1000 + 60;
64-
throw new RepoRateLimitError(resetTimestamp);
109+
// Primary rate limit
110+
if (remaining === '0') {
111+
const resetTime = response.headers.get('X-RateLimit-Reset');
112+
const resetTimestamp = resetTime ? parseInt(resetTime, 10) : Date.now() / 1000 + 60;
113+
throw new RepoRateLimitError(resetTimestamp);
114+
}
115+
}
65116
}
66117

67118
async function createApiError(response: Response): Promise<RepoApiError> {

src/storage/repo/repo-client.ts

Lines changed: 79 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -126,25 +126,51 @@ export class RepoStorageBackend implements StorageBackend {
126126
}
127127

128128
/**
 * Commit a set of file updates to the repository in a single commit.
 *
 * @param files - Map of repo-relative path to new content; a `null` value
 *   marks the file for deletion (handled inside buildTreeEntries).
 *
 * Timing of the tree build and of the whole upload is written to the
 * progress log for debugging.
 */
public async updateFiles(files: Record<string, string | null>): Promise<void> {
  const fileCount = Object.keys(files).length;
  const startTime = Date.now();

  // Get current HEAD commit SHA
  const headSha = await this.getHeadSha();

  // Get current tree
  const currentTree = await this.getTree(headSha);

  // Build new tree entries (includes blob creation for changed files)
  const buildStart = Date.now();
  const treeEntries = await this.buildTreeEntries(files, currentTree);
  const buildDuration = Date.now() - buildStart;
  // treeEntries contains retained (unchanged) entries as well as new ones, so
  // report it as a tree-entry count — the previous message claimed this many
  // blobs were created, which overstated the work done.
  this.logProgress(
    `Built ${String(treeEntries.length)} tree entries in ${String(buildDuration)}ms`
  );

  // Create new tree
  const newTreeSha = await this.createTree(treeEntries, currentTree.sha);

  // Create commit
  const message = `Sync update: ${String(fileCount)} files`;
  const commitSha = await this.createCommit(message, newTreeSha, headSha);

  // Update HEAD ref
  await this.updateRef(commitSha);

  const totalDuration = Date.now() - startTime;
  this.logProgress(`Upload complete: ${String(fileCount)} files in ${String(totalDuration)}ms`);
}
157+
158+
/** Log progress for debugging */
159+
private logProgress(message: string): void {
160+
try {
161+
// Use dynamic require to avoid module resolution issues
162+
/* eslint-disable @typescript-eslint/no-require-imports */
163+
const os = require('node:os') as { homedir: () => string };
164+
const fs = require('node:fs') as { appendFileSync: (path: string, data: string) => void };
165+
const path = require('node:path') as { join: (...parts: string[]) => string };
166+
/* eslint-enable @typescript-eslint/no-require-imports */
167+
const logDir = path.join(os.homedir(), '.local/share/opencode/log');
168+
const logFile = path.join(logDir, 'opencode-sync.log');
169+
const timestamp = new Date().toISOString();
170+
fs.appendFileSync(logFile, `${timestamp} [opencode-sync] [REPO] ${message}\n`);
171+
} catch {
172+
// Ignore logging errors
173+
}
148174
}
149175

150176
public async listFiles(): Promise<StorageFile[]> {
@@ -329,28 +355,68 @@ export class RepoStorageBackend implements StorageBackend {
329355
.filter((e) => !updatedPaths.has(e.path))
330356
.map((e) => ({ path: e.path, mode: e.mode, type: e.type, sha: e.sha }));
331357

332-
// Add new/updated files
358+
// Collect files that need blob creation
359+
const filesToUpload: { path: string; content: string }[] = [];
333360
for (const [path, content] of Object.entries(files)) {
334-
const fullPath = `${SYNC_DIR}/${path}`;
335-
336-
if (content === null) {
337-
// File deletion - already excluded from entries
338-
continue;
361+
if (content !== null) {
362+
filesToUpload.push({ path: `${SYNC_DIR}/${path}`, content });
339363
}
364+
}
340365

341-
// Create blob for new content
342-
const blobSha = await this.createBlob(content);
366+
// Create blobs in small parallel batches with delays to avoid secondary rate limits
367+
const BATCH_SIZE = 5;
368+
const blobResults = await this.createBlobsInBatches(filesToUpload, BATCH_SIZE);
369+
370+
// Add new entries from batch results
371+
for (const result of blobResults) {
343372
entries.push({
344-
path: fullPath,
373+
path: result.path,
345374
mode: '100644',
346375
type: 'blob',
347-
sha: blobSha,
376+
sha: result.sha,
348377
});
349378
}
350379

351380
return entries;
352381
}
353382

383+
/**
384+
* Create blobs in parallel batches with rate limit protection.
385+
* Uses small batches with delays to avoid GitHub's secondary rate limits.
386+
*/
387+
private async createBlobsInBatches(
388+
files: { path: string; content: string }[],
389+
batchSize: number
390+
): Promise<{ path: string; sha: string }[]> {
391+
const results: { path: string; sha: string }[] = [];
392+
const totalBatches = Math.ceil(files.length / batchSize);
393+
394+
for (let i = 0; i < files.length; i += batchSize) {
395+
const batchNum = Math.floor(i / batchSize) + 1;
396+
const batch = files.slice(i, i + batchSize);
397+
398+
if (files.length > batchSize) {
399+
this.logProgress(
400+
`Batch ${String(batchNum)}/${String(totalBatches)} (${String(batch.length)} files)`
401+
);
402+
}
403+
404+
const batchPromises = batch.map(async (file) => {
405+
const sha = await this.createBlob(file.content);
406+
return { path: file.path, sha };
407+
});
408+
const batchResults = await Promise.all(batchPromises);
409+
results.push(...batchResults);
410+
411+
// Add delay between batches to avoid secondary rate limits
412+
if (i + batchSize < files.length) {
413+
await new Promise((r) => setTimeout(r, 100));
414+
}
415+
}
416+
417+
return results;
418+
}
419+
354420
private async createBlob(content: string): Promise<string> {
355421
const body = JSON.stringify({
356422
content: Buffer.from(content).toString('base64'),

src/sync/engine/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,6 @@ export {
1818
buildNoChangeResult,
1919
handleSyncError,
2020
} from './result.js';
21+
export { syncLog, startOperation } from './logger.js';
2122
export { checkMaxRetries, calculateBackoff, sleep, MAX_CONFLICT_RETRIES } from './retry.js';
23+
export { determineAction, executeRoute } from './routing.js';

src/sync/engine/logger.ts

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/**
2+
* Sync Engine Logger
3+
*
4+
* Provides logging for sync operations with timestamps.
5+
* Uses file-based logging for persistence across sessions.
6+
*/
7+
8+
import { appendFileSync, mkdirSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
11+
12+
const LOG_PREFIX = '[opencode-sync]';
13+
14+
/**
15+
* Log a sync message to the log file.
16+
*/
17+
export function syncLog(message: string): void {
18+
const timestamp = new Date().toISOString();
19+
const logMessage = `${timestamp} ${LOG_PREFIX} ${message}`;
20+
21+
try {
22+
const logDir = join(homedir(), '.local/share/opencode/log');
23+
const logFile = join(logDir, 'opencode-sync.log');
24+
appendFileSync(logFile, logMessage + '\n');
25+
} catch {
26+
// Fallback to console if file write fails
27+
console.error(logMessage);
28+
}
29+
}
30+
31+
/**
32+
* Log a sync operation with timing.
33+
* Returns a function to call when the operation completes.
34+
*/
35+
export function startOperation(name: string): (result?: string) => void {
36+
const start = Date.now();
37+
syncLog(`[SYNC] Starting: ${name}`);
38+
return (result?: string) => {
39+
const duration = Date.now() - start;
40+
const suffix = result ? ` - ${result}` : '';
41+
syncLog(`[SYNC] Completed: ${name} (${String(duration)}ms)${suffix}`);
42+
};
43+
}
44+
45+
/**
46+
* Log detailed debug information.
47+
*/
48+
export function syncDebug(message: string): void {
49+
syncLog(`[DEBUG] ${message}`);
50+
}

0 commit comments

Comments
 (0)