@@ -29,6 +29,8 @@ export interface PullOptions {
2929 backend : StorageBackend ;
3030 /** Map of category → item ID → checksum for existing local items */
3131 localChecksums ?: Record < SyncCategory , Record < string , string > > ;
32+ /** Map of category → item ID → git SHA for incremental pull */
33+ localRemoteShas ?: Partial < Record < SyncCategory , Record < string , string > > > ;
3234}
3335
3436/**
@@ -39,6 +41,8 @@ export interface ExtendedPullResult extends PullResult {
3941 downloadedItems : Record < SyncCategory , string [ ] > ;
4042 /** Items that should be deleted locally (tombstoned remotely) */
4143 tombstonedItems : Record < SyncCategory , Record < string , Tombstone > > ;
44+ /** Remote SHAs for all synced items (for incremental pull) */
45+ remoteShas : Partial < Record < SyncCategory , Record < string , string > > > ;
4246}
4347
4448/** Result accumulator for pull operation */
@@ -47,6 +51,7 @@ interface PullAccumulator {
4751 changedCategories : SyncCategory [ ] ;
4852 downloadedItems : Record < SyncCategory , string [ ] > ;
4953 tombstonedItems : Record < SyncCategory , Record < string , Tombstone > > ;
54+ remoteShas : Partial < Record < SyncCategory , Record < string , string > > > ;
5055}
5156
5257/** Info about a file to fetch */
@@ -63,6 +68,7 @@ function createPullAccumulator(): PullAccumulator {
6368 changedCategories : [ ] ,
6469 downloadedItems : { } as Record < SyncCategory , string [ ] > ,
6570 tombstonedItems : { } as Record < SyncCategory , Record < string , Tombstone > > ,
71+ remoteShas : { } ,
6672 } ;
6773}
6874
@@ -71,7 +77,8 @@ function recordItemPull(
7177 acc : PullAccumulator ,
7278 cat : SyncCategory ,
7379 data : ItemCategoryData ,
74- tombstones : Record < string , Tombstone >
80+ tombstones : Record < string , Tombstone > ,
81+ shas : Record < string , string >
7582) : void {
7683 if ( Object . keys ( data . items ) . length > 0 ) {
7784 acc . pulledData . push ( data ) ;
@@ -81,24 +88,36 @@ function recordItemPull(
8188 if ( Object . keys ( tombstones ) . length > 0 ) {
8289 acc . tombstonedItems [ cat ] = tombstones ;
8390 }
91+ // Always record SHAs (merge with existing for incremental)
92+ acc . remoteShas [ cat ] = { ...( acc . remoteShas [ cat ] ?? { } ) , ...shas } ;
8493}
8594
86- /** Build list of files to fetch, excluding tombstoned items */
95+ /** Build list of files to fetch, excluding tombstoned and unchanged items */
8796function buildFilesToFetch (
8897 categoryFiles : StorageFile [ ] ,
89- categoryTombstones : Record < string , Tombstone >
98+ categoryTombstones : Record < string , Tombstone > ,
99+ localShas ?: Record < string , string >
90100) : FetchInfo [ ] {
91101 const filesToFetch : FetchInfo [ ] = [ ] ;
102+ let skipped = 0 ;
92103 for ( const file of categoryFiles ) {
93104 const itemId = getItemIdFromFilename ( file . filename ) ;
94105 if ( ! itemId ) continue ;
95106 if ( itemId in categoryTombstones ) continue ;
107+ // Skip if local has same SHA (unchanged)
108+ if ( localShas && file . sha && localShas [ itemId ] === file . sha ) {
109+ skipped ++ ;
110+ continue ;
111+ }
96112 filesToFetch . push ( {
97113 itemId,
98114 filename : file . filename ,
99115 checksum : file . sha ?? '' ,
100116 } ) ;
101117 }
118+ if ( skipped > 0 ) {
119+ syncLog ( `[PULL] Skipped ${ String ( skipped ) } unchanged files` ) ;
120+ }
102121 return filesToFetch ;
103122}
104123
@@ -127,8 +146,27 @@ function processDownloadedFiles(
127146 return { items, checksums } ;
128147}
129148
149+ /** Build SHA map from fetched files */
150+ function buildShaMap ( filesToFetch : FetchInfo [ ] ) : Record < string , string > {
151+ const shas : Record < string , string > = { } ;
152+ for ( const f of filesToFetch ) {
153+ if ( f . checksum ) shas [ f . itemId ] = f . checksum ;
154+ }
155+ return shas ;
156+ }
157+
158+ /** Load tombstones for a category */
159+ async function loadCategoryTombstones (
160+ backend : StorageBackend ,
161+ cat : SyncCategory
162+ ) : Promise < Record < string , Tombstone > > {
163+ const content = await backend . getFile ( TOMBSTONES_FILENAME ) ;
164+ const file = parseTombstonesFile ( content ) ;
165+ return getCategoryTombstones ( file , cat ) ;
166+ }
167+
130168export async function pullCategories ( options : PullOptions ) : Promise < ExtendedPullResult > {
131- const { manifest, enabledCategories, backend, localChecksums } = options ;
169+ const { manifest, enabledCategories, backend, localRemoteShas } = options ;
132170 const acc = createPullAccumulator ( ) ;
133171
134172 for ( const [ category , info ] of Object . entries ( manifest . categories ) ) {
@@ -138,7 +176,7 @@ export async function pullCategories(options: PullOptions): Promise<ExtendedPull
138176 continue ;
139177 }
140178 syncLog ( `[PULL] Processing ${ cat } (type: ${ info . type } )` ) ;
141- await pullTreeIndexedCategory ( cat , info , localChecksums ?. [ cat ] ?? { } , backend , acc ) ;
179+ await pullTreeIndexedCategory ( cat , info , localRemoteShas ?. [ cat ] , backend , acc ) ;
142180 }
143181
144182 return acc ;
@@ -150,7 +188,7 @@ export async function pullCategories(options: PullOptions): Promise<ExtendedPull
150188async function pullTreeIndexedCategory (
151189 cat : SyncCategory ,
152190 info : TreeIndexedCategoryInfo ,
153- _localChecksums : Record < string , string > ,
191+ localShas : Record < string , string > | undefined ,
154192 backend : StorageBackend ,
155193 acc : PullAccumulator
156194) : Promise < void > {
@@ -160,33 +198,28 @@ async function pullTreeIndexedCategory(
160198 const allFiles = await backend . listFiles ( ) ;
161199 const categoryFiles = allFiles . filter ( ( f ) => f . filename . startsWith ( info . pathPrefix ) ) ;
162200 syncLog ( `[PULL] ${ cat } : found ${ String ( categoryFiles . length ) } files in tree` ) ;
163-
164201 if ( categoryFiles . length === 0 ) return ;
165202
166- // Load tombstones from separate file
167- const tombstonesContent = await backend . getFile ( TOMBSTONES_FILENAME ) ;
168- const tombstonesFile = parseTombstonesFile ( tombstonesContent ) ;
169- const categoryTombstones = getCategoryTombstones ( tombstonesFile , cat ) ;
203+ // Load tombstones and build file list
204+ const categoryTombstones = await loadCategoryTombstones ( backend , cat ) ;
170205 syncLog ( `[PULL] ${ cat } : ${ String ( Object . keys ( categoryTombstones ) . length ) } tombstones` ) ;
171-
172- // Build list of files to download (excluding tombstoned items)
173- const filesToFetch = buildFilesToFetch ( categoryFiles , categoryTombstones ) ;
206+ const filesToFetch = buildFilesToFetch ( categoryFiles , categoryTombstones , localShas ) ;
207+ syncLog ( `[PULL] ${ cat } : ${ String ( filesToFetch . length ) } files to fetch` ) ;
174208
175209 if ( filesToFetch . length === 0 ) {
176- if ( Object . keys ( categoryTombstones ) . length > 0 ) {
177- acc . tombstonedItems [ cat ] = categoryTombstones ;
178- }
210+ if ( Object . keys ( categoryTombstones ) . length > 0 ) acc . tombstonedItems [ cat ] = categoryTombstones ;
179211 return ;
180212 }
181213
182214 // Bulk download and process files
215+ syncLog ( `[PULL] ${ cat } : starting bulk download...` ) ;
183216 const contents = await backend . getFiles ( filesToFetch . map ( ( f ) => f . filename ) ) ;
184217 const { items, checksums } = processDownloadedFiles ( filesToFetch , contents ) ;
185218 syncLog (
186219 `[PULL] ${ cat } : downloaded ${ String ( Object . keys ( items ) . length ) } /${ String ( filesToFetch . length ) } items`
187220 ) ;
188221
189- // Record results
222+ // Record results with SHAs for incremental sync
190223 const data : ItemCategoryData = { category : cat , type : 'items' , items, checksums } ;
191- recordItemPull ( acc , cat , data , categoryTombstones ) ;
224+ recordItemPull ( acc , cat , data , categoryTombstones , buildShaMap ( filesToFetch ) ) ;
192225}
0 commit comments