@@ -11,13 +11,110 @@ export function end_hydrating() {
 	is_hydrating = false;
 }
 
-export function append(target: Node & {actual_end_child?: Node | null}, node: Node) {
+type NodeEx = Node & {
+	claim_order?: number,
+	hydrate_init?: true,
+	is_in_lis?: true,
+	actual_end_child?: Node,
+	childNodes: NodeListOf<NodeEx>,
+};
+
+function upper_bound(low: number, high: number, key: (index: number) => number, value: number) {
+	// Return first index of value larger than input value in the range [low, high)
+	while (low < high) {
+		const mid = low + ((high - low) >> 1);
+		if (key(mid) <= value) {
+			low = mid + 1;
+		} else {
+			high = mid;
+		}
+	}
+	return low;
+}
+
+function init_hydrate(target: NodeEx) {
+	if (target.hydrate_init) return;
+	target.hydrate_init = true;
+
+	// We know that all children have claim_order values since the unclaimed have been detached
+	const children = target.childNodes as NodeListOf<NodeEx & { claim_order: number }>;
+
+	/*
+	* Reorder claimed children optimally.
+	* We can reorder claimed children optimally by finding the longest subsequence of
+	* nodes that are already claimed in order and only moving the rest. The longest
+	* subsequence of nodes that are claimed in order can be found by
+	* computing the longest increasing subsequence of .claim_order values.
+	*
+	* This algorithm is optimal in generating the least amount of reorder operations
+	* possible.
+	*
+	* Proof:
+	* We know that, given a set of reordering operations, the nodes that do not move
+	* always form an increasing subsequence, since they do not move among each other,
+	* meaning that they must already be ordered among each other. Thus, the maximal
+	* set of nodes that do not move forms a longest increasing subsequence.
+	*/
+
+	// Compute longest increasing subsequence
+	// m: subsequence length j => index k of smallest value that ends an increasing subsequence of length j
+	const m = new Int32Array(children.length + 1);
+	// Predecessor indices + 1
+	const p = new Int32Array(children.length);
+
+	m[0] = -1;
+	let longest = 0;
+	for (let i = 0; i < children.length; i++) {
+		const current = children[i].claim_order;
+		// Find the largest subsequence length such that it ends in a value less than our current value
+
+		// upper_bound returns first greater value, so we subtract one
+		const seqLen = upper_bound(1, longest + 1, idx => children[m[idx]].claim_order, current) - 1;
+
+		p[i] = m[seqLen] + 1;
+
+		const newLen = seqLen + 1;
+
+		// We can guarantee that current is the smallest value. Otherwise, we would have generated a longer sequence.
+		m[newLen] = i;
+
+		longest = Math.max(newLen, longest);
+	}
+
+	// The longest increasing subsequence of nodes (initially reversed)
+	const lis = [];
+	for (let cur = m[longest] + 1; cur != 0; cur = p[cur - 1]) {
+		const node = children[cur - 1];
+		lis.push(node);
+		node.is_in_lis = true;
+	}
+	lis.reverse();
+
+	// Move all nodes that aren't in the longest increasing subsequence
+	const toMove: NodeEx[] = [];
+	for (let i = 0; i < children.length; i++) {
+		if (!children[i].is_in_lis) {
+			toMove.push(children[i]);
+		}
+	}
+
+	toMove.forEach((node) => {
+		const idx = upper_bound(0, lis.length, idx => lis[idx].claim_order, node.claim_order);
+		if ((idx == 0) || (lis[idx - 1].claim_order != node.claim_order)) {
+			const nxt = idx == lis.length ? null : lis[idx];
+			target.insertBefore(node, nxt);
+		}
+	});
+}
+
+export function append(target: NodeEx, node: NodeEx) {
 	if (is_hydrating) {
-		// If we are just starting with this target, we will insert before the firstChild (which may be null)
-		if (target.actual_end_child === undefined) {
+		init_hydrate(target);
+
+		if ((target.actual_end_child === undefined) || ((target.actual_end_child !== null) && (target.actual_end_child.parentElement !== target))) {
 			target.actual_end_child = target.firstChild;
 		}
-		if (node.parentNode !== target) {
+		if (node !== target.actual_end_child) {
 			target.insertBefore(node, target.actual_end_child);
 		} else {
 			target.actual_end_child = node.nextSibling;
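The block comment in init_hydrate above argues that moving only the nodes outside the longest increasing subsequence of claim_order values minimizes reorder operations. As a minimal standalone sketch (not part of the commit; the function name and sample values are illustrative), the same O(n log n) patience-style recurrence can be written over a plain number array:

function longest_increasing_subsequence(values: number[]): number[] {
	// m[len] holds the index of the smallest value that ends an increasing subsequence of length len
	const m = new Int32Array(values.length + 1);
	// p[i] holds the predecessor index + 1 of values[i] in the subsequence it extends
	const p = new Int32Array(values.length);
	m[0] = -1;
	let longest = 0;
	for (let i = 0; i < values.length; i++) {
		// Binary search (same as upper_bound above): first length whose tail value exceeds values[i]
		let low = 1;
		let high = longest + 1;
		while (low < high) {
			const mid = low + ((high - low) >> 1);
			if (values[m[mid]] <= values[i]) {
				low = mid + 1;
			} else {
				high = mid;
			}
		}
		const len = low - 1;   // longest subsequence we can extend with values[i]
		p[i] = m[len] + 1;
		m[len + 1] = i;        // values[i] is now the smallest known tail of a subsequence of length len + 1
		longest = Math.max(len + 1, longest);
	}
	// Walk the predecessor chain back from the tail of the longest subsequence
	const result: number[] = [];
	for (let cur = m[longest] + 1; cur !== 0; cur = p[cur - 1]) {
		result.push(values[cur - 1]);
	}
	return result.reverse();
}

// longest_increasing_subsequence([3, 0, 1, 4, 2]) returns [0, 1, 2]:
// only the nodes carrying claim_order 3 and 4 would have to be moved.

init_hydrate runs exactly this recurrence over children[i].claim_order and then calls insertBefore only for the toMove nodes.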
@@ -27,7 +124,7 @@ export function append(target: Node & {actual_end_child?: Node | null}, node: No
 	}
 }
 
-export function insert(target: Node, node: Node, anchor?: Node) {
+export function insert(target: NodeEx, node: NodeEx, anchor?: NodeEx) {
 	if (is_hydrating && !anchor) {
 		append(target, node);
 	} else if (node.parentNode !== target || (anchor && node.nextSibling !== anchor)) {
@@ -176,53 +273,75 @@ export function time_ranges_to_array(ranges) {
 	return array;
 }
 
-export function children(element: HTMLElement) {
-	return Array.from(element.childNodes);
-}
+type ChildNodeEx = ChildNode & NodeEx;
 
-type ChildNodeArray = ChildNode[] & {
-	/**
-	 * All nodes at or after this index are available for preservation (not getting detached)
-	 */
-	lastKeepIndex?: number;
+type ChildNodeArray = ChildNodeEx[] & {
+	claim_info?: {
+		/**
+		 * The index of the last claimed element
+		 */
+		last_index: number;
+		/**
+		 * The total number of elements claimed
+		 */
+		total_claimed: number;
+	}
 };
 
-function claim_node<R extends ChildNode>(nodes: ChildNodeArray, predicate: (node: ChildNode) => node is R, processNode: (node: ChildNode) => void, createNode: () => R) {
-	if (nodes.lastKeepIndex === undefined) {
-		nodes.lastKeepIndex = 0;
-	}
-
-	// We first try to find a node we can actually keep without detaching
-	// This node should be after the previous node that we chose to keep without detaching
-	for (let i = nodes.lastKeepIndex; i < nodes.length; i++) {
-		const node = nodes[i];
-
-		if (predicate(node)) {
-			processNode(node);
+export function children(element: Element) {
+	return Array.from(element.childNodes);
+}
 
-			nodes.splice(i, 1);
-			nodes.lastKeepIndex = i;
-			return node;
-		}
+function claim_node<R extends ChildNodeEx>(nodes: ChildNodeArray, predicate: (node: ChildNodeEx) => node is R, processNode: (node: ChildNodeEx) => void, createNode: () => R, dontUpdateLastIndex: boolean = false) {
+	// Try to find nodes in an order such that we lengthen the longest increasing subsequence
+	if (nodes.claim_info === undefined) {
+		nodes.claim_info = {last_index: 0, total_claimed: 0};
 	}
 
-
-	// Otherwise, we try to find a node that we should detach
-	for (let i = 0; i < nodes.lastKeepIndex; i++) {
-		const node = nodes[i];
+	const resultNode = (() => {
+		// We first try to find an element after the previous one
+		for (let i = nodes.claim_info.last_index; i < nodes.length; i++) {
+			const node = nodes[i];
+
+			if (predicate(node)) {
+				processNode(node);
+
+				nodes.splice(i, 1);
+				if (!dontUpdateLastIndex) {
+					nodes.claim_info.last_index = i;
+				}
+				return node;
+			}
+		}
 
-		if (predicate(node)) {
-			processNode(node);
-
-			nodes.splice(i, 1);
-			nodes.lastKeepIndex -= 1;
-			detach(node);
-			return node;
+
+		// Otherwise, we try to find one before
+		// We iterate in reverse so that we don't go too far back
+		for (let i = nodes.claim_info.last_index - 1; i >= 0; i--) {
+			const node = nodes[i];
+
+			if (predicate(node)) {
+				processNode(node);
+
+				nodes.splice(i, 1);
+				if (!dontUpdateLastIndex) {
+					nodes.claim_info.last_index = i;
+				} else {
+					// Since we spliced before the last_index, we decrease it
+					nodes.claim_info.last_index--;
+				}
+				detach(node);
+				return node;
+			}
 		}
-	}
+
+		// If we can't find any matching node, we create a new one
+		return createNode();
+	})();
 
-	// If we can't find any matching node, we create a new one
-	return createNode();
+	resultNode.claim_order = nodes.claim_info.total_claimed;
+	nodes.claim_info.total_claimed += 1;
+	return resultNode;
 }
 
 export function claim_element(nodes: ChildNodeArray, name: string, attributes: {[key: string]: boolean}, svg) {
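claim_node now stamps every node it returns with an increasing claim_order, and init_hydrate later binary-searches those values with upper_bound to decide where a node outside the longest increasing subsequence must be re-inserted. A small sketch with assumed values (the array below is illustrative, not taken from the commit):

// claim_order values of the nodes already kept in the longest increasing subsequence
const lis_orders = [1, 3, 4, 8];
// upper_bound returns the first index whose value is strictly greater than the input
const idx = upper_bound(0, lis_orders.length, i => lis_orders[i], 5);   // idx === 3
// A node with claim_order 5 is therefore inserted before the node with claim_order 8;
// when idx equals lis_orders.length, insertBefore(node, null) appends it at the end.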
@@ -247,8 +366,9 @@ export function claim_text(nodes: ChildNodeArray, data) {
 	return claim_node<Text>(
 		nodes,
 		(node: ChildNode): node is Text => node.nodeType === 3,
-		(node: Text) => node.data = '' + data,
-		() => text(data)
+		(node: Text) => { node.data = '' + data },
+		() => text(data),
+		true	// Text nodes should not update last index since it is likely not worth it to eliminate an increasing subsequence of actual elements
 	);
 }
 
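The trailing true argument reaches claim_node as dontUpdateLastIndex, so claiming a text node does not move claim_info.last_index: the next element claim still searches forward from the last claimed element. A rough, hypothetical sketch of the call sequence this protects (the markup, the variable names, and the parent element target are assumed, not taken from the commit):

// Hypothetical hydration sequence for markup like <h1>…</h1>some text<p>…</p>
const nodes = children(target) as ChildNodeArray;   // target is the assumed parent element
const h1 = claim_element(nodes, 'H1', {}, false);   // claim_info.last_index moves to where the <h1> was found
const t = claim_text(nodes, 'some text');           // claimed with dontUpdateLastIndex = true: last_index is left alone
const p = claim_element(nodes, 'P', {}, false);     // the forward search still resumes where the <h1> was claimed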