11import type { SearchIndex } from 'algoliasearch' ;
22import type { QueueObject } from 'async' ;
33import { queue } from 'async' ;
4- import ms from 'ms' ;
54import type { DatabaseChangesResultItem , DocumentLookupFailure } from 'nano' ;
65
76import type { StateManager } from './StateManager' ;
8- import { config } from './config' ;
97import * as npm from './npm' ;
108import saveDocs from './saveDocs' ;
119import { datadog } from './utils/datadog' ;
1210import { log } from './utils/log' ;
1311import * as sentry from './utils/sentry' ;
1412
15- let loopStart = Date . now ( ) ;
// Cached npmInfo.seq — the registry's latest sequence number, refreshed
// from npm.getInfo(); read by logProgress() to compute the sync percentage.
let totalSequence: number;
// Serial queue consuming CouchDB change events one at a time, created by
// createChangeConsumer() and fed by the watch/catchup loops.
let changesConsumer: QueueObject<DatabaseChangesResultItem>;
1815
1916/**
20- * Run watch and catchup.
21- *
22- * --- Catchup ?
23- * If the bootstrap is long or the process has been stopped long enough,
24- * we are lagging behind few changes.
25- * Catchup() will paginate through changes that we have missed.
17+ * Run watch.
2618 *
2719 * --- Watch ?
 * Watch is "long polled": this mode is not paginated, and the event system in CouchDB sends
@@ -43,12 +35,6 @@ let changesConsumer: QueueObject<DatabaseChangesResultItem>;
 * until another package is updated.
 * It will never be up to date because it receives events at the same pace
 * as they arrive in listener A, even if it's not the same package.
46- *
47- *
48- * --- We could use catchup with a timeout between poll then?
49- * Yes !
50- * When we are catched up, we could await between poll and we will receive N changes.
51- * But long-polling is more efficient in term of bandwidth and more reactive.
5238 */
5339async function run (
5440 stateManager : StateManager ,
@@ -60,54 +46,11 @@ async function run(
6046
6147 changesConsumer = createChangeConsumer ( stateManager , mainIndex ) ;
6248
63- await catchup ( stateManager ) ;
64-
65- log . info ( '🚀 Index is up to date, watch mode activated' ) ;
66-
6749 await watch ( stateManager ) ;
6850
6951 log . info ( '🚀 watch is done' ) ;
7052}
7153
/**
 * Loop through all changes that may have been missed while the process
 * was down (or during a long bootstrap), one page at a time, until our
 * persisted sequence reaches the registry's current sequence.
 */
async function catchup(stateManager: StateManager): Promise<void> {
  let hasCaughtUp: boolean = false;

  while (!hasCaughtUp) {
    // Reset the rate timer for this page (read by logProgress()).
    loopStart = Date.now();

    try {
      // Refresh the registry's latest sequence number so the exit
      // condition below compares against up-to-date data.
      const npmInfo = await npm.getInfo();
      totalSequence = npmInfo.seq;

      // Last successfully processed sequence, persisted by the consumer.
      const { seq } = await stateManager.get();

      log.info('🚀 Catchup: continue since sequence [%d]', seq);

      // Get one chunk of changes from registry
      const changes = await npm.getChanges({
        since: seq,
        limit: config.replicateConcurrency,
        include_docs: true,
      });

      // Feed the whole page to the serial consumer, then wait until the
      // queue is fully drained before fetching the next page.
      for (const change of changes.results) {
        changesConsumer.push(change);
      }
      await changesConsumer.drain();

      // Re-read persisted state: the consumer saves seq after each change.
      // NOTE(review): `seq!` assumes state always holds a seq here — confirm.
      const newState = await stateManager.get();
      if (newState.seq! >= totalSequence) {
        hasCaughtUp = true;
      }
    } catch (err) {
      // Report and retry the same page; the loop only exits once caught up.
      sentry.report(err);
    }
  }
}
110-
11154/**
11255 * Active synchronous mode with Registry.
 * Changes are polled with a keep-alive connection.
@@ -144,7 +87,7 @@ async function watch(stateManager: StateManager): Promise<true> {
14487}
14588
14689/**
147- * Process changes.
 * Process changes in order.
14891 */
14992async function loop (
15093 mainIndex : SearchIndex ,
@@ -180,20 +123,15 @@ async function loop(
180123}
181124
182125/**
183- * Log our process through catchup/ watch.
126+ * Log our process through watch.
184127 *
185128 */
186- function logProgress ( seq : number , nbChanges : number ) : void {
187- const ratePerSecond = nbChanges / ( ( Date . now ( ) - loopStart ) / 1000 ) ;
188- const remaining = ( ( totalSequence - seq ) / ratePerSecond ) * 1000 || 0 ;
189-
129+ function logProgress ( seq : number ) : void {
190130 log . info (
191- `🚀 Synced %d/%d changes (%d%), current rate: %d changes/s (%s remaining) ` ,
131+ `🚀 Synced %d/%d changes (%d%)` ,
192132 seq ,
193133 totalSequence ,
194- Math . floor ( ( Math . max ( seq , 1 ) / totalSequence ) * 100 ) ,
195- Math . round ( ratePerSecond ) ,
196- ms ( remaining )
134+ Math . floor ( ( Math . max ( seq , 1 ) / totalSequence ) * 100 )
197135 ) ;
198136}
199137
@@ -220,7 +158,7 @@ function createChangeConsumer(
220158 await stateManager . save ( {
221159 seq,
222160 } ) ;
223- logProgress ( seq , 1 ) ;
161+ logProgress ( seq ) ;
224162 } catch ( err ) {
225163 sentry . report ( err ) ;
226164 }
0 commit comments