Commit cab079b

feat(descriptors): minimize blocking the UI
This adds a "pauser" into the descriptor converter which avoids blocking the UI by scheduling idle callbacks. See the comment in the code for justification and potential follow-up work.
1 parent 15e97a6 commit cab079b
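
For context, a minimal sketch (not code from this commit) of the idle-callback pattern the commit message refers to: do one chunk of work, then yield so the browser can paint and handle input before the next chunk. The names yieldToUI and processAll are illustrative only; the commit's actual implementation is the Pauser class in the diff below.

    // Illustrative sketch only, assuming a browser-like environment.
    function yieldToUI(): Promise<void> {
      return new Promise((resolve) => {
        if (typeof requestIdleCallback === 'function') {
          requestIdleCallback(() => resolve(), {timeout: 10})
        } else {
          setTimeout(resolve, 0)
        }
      })
    }

    async function processAll<T>(items: T[], work: (item: T) => void): Promise<void> {
      for (const item of items) {
        await yieldToUI() // let the UI handle input and paint between chunks
        work(item)
      }
    }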

2 files changed, +122 -7 lines changed


packages/@sanity/schema/src/descriptors/convert.ts

Lines changed: 96 additions & 3 deletions
@@ -51,6 +51,64 @@ const MAX_DEPTH_UKNOWN = 5
 
 type UnknownRecord<T> = {[P in keyof T]: unknown}
 
+/**
+ * A helper class for doing work without interrupting the UI.
+ * It also records the durations during which it did block the UI.
+ */
+class Pauser {
+  durations: number[]
+  lastNow: number | undefined
+
+  constructor(durations: number[]) {
+    this.durations = durations
+  }
+
+  /**
+   * Yields control back to the UI.
+   */
+  pause(): Promise<void> {
+    return new Promise((resolve) => {
+      const done = () => {
+        this._recordDuration()
+        resolve()
+      }
+
+      if (typeof requestIdleCallback === 'function') {
+        requestIdleCallback(done, {timeout: 10})
+      } else if (typeof requestAnimationFrame === 'function') {
+        requestAnimationFrame(done)
+      } else {
+        setTimeout(done, 0)
+      }
+    })
+  }
+
+  /**
+   * This should be invoked when the work is done in order to record the final duration as well.
+   */
+  end() {
+    this._recordDuration()
+  }
+
+  /** Executes a function for each item while pausing in between. */
+  async map<T, U>(arr: T[], fn: (val: T) => U): Promise<U[]> {
+    const result: U[] = []
+    for (const val of arr) {
+      await this.pause()
+      result.push(fn(val))
+    }
+    return result
+  }
+
+  _recordDuration() {
+    const now = performance.now()
+    if (this.lastNow) {
+      this.durations.push(now - this.lastNow)
+    }
+    this.lastNow = now
+  }
+}
+
 export class DescriptorConverter {
   cache: WeakMap<Schema, SetSynchronization<RegistryType>> = new WeakMap()

@@ -59,18 +117,49 @@ export class DescriptorConverter
    *
    * This is automatically cached in a weak map.
    */
-  async get(schema: Schema): Promise<SetSynchronization<RegistryType>> {
+  async get(
+    schema: Schema,
+    opts?: {
+      /** If present, this array will be populated with the durations we blocked the UI. */
+      pauseDurations?: number[]
+    },
+  ): Promise<SetSynchronization<RegistryType>> {
+    /*
+    Converting the schema into a descriptor consists of two parts:
+
+    1. Traversing the type into a descriptor.
+    2. Serializing the descriptor, including SHA256 hashing.
+
+    Note that only (2) can be done in a background worker since the type
+    itself isn't serializable (which is a requirement for a background
+    worker). In addition, we expect (2) to scale in the same way as (1): if it
+    takes X milliseconds to traverse the type into a descriptor, it will
+    probably take c*X milliseconds to serialize it.
+
+    This means that a background worker doesn't actually give us that much
+    value. A huge type will be expensive to convert from a type into a
+    descriptor either way. Therefore this function currently only avoids
+    blocking by processing each type separately.
+
+    If we want to minimize the blocking further, we would have to restructure
+    this converter so it can convert the types asynchronously, and _then_ it
+    might make sense to run the serialization step itself in a background
+    worker.
+    */
     let value = this.cache.get(schema)
     if (value) return value
 
+    const pauseDurations = opts?.pauseDurations || []
+    const pauser = new Pauser(pauseDurations)
+
     const options: Options = {
       fields: new Map(),
       duplicateFields: new Map(),
       arrayElements: new Map(),
       duplicateArrayElements: new Map(),
     }
 
-    const namedTypes = schema.getLocalTypeNames().map((name) => {
+    const namedTypes = await pauser.map(schema.getLocalTypeNames(), (name) => {
       const typeDef = convertTypeDef(schema.get(name)!, name, options)
       return {name, typeDef}
     })
@@ -90,23 +179,27 @@
 
     // Now we can build the de-duplicated objects:
     for (const [fieldDef, key] of options.duplicateFields.entries()) {
+      await pauser.pause()
       builder.addObject('sanity.schema.hoisted', {key, value: {...fieldDef}})
     }
 
     for (const [arrayElem, key] of options.duplicateArrayElements.entries()) {
+      await pauser.pause()
       builder.addObject('sanity.schema.hoisted', {key, value: {...arrayElem}})
     }
 
     for (const namedType of namedTypes) {
+      await pauser.pause()
       builder.addObject('sanity.schema.namedType', namedType)
     }
 
     if (schema.parent) {
-      builder.addSet(await this.get(schema.parent))
+      builder.addSet(await this.get(schema.parent, {pauseDurations}))
     }
 
     value = builder.build('sanity.schema.registry')
     this.cache.set(schema, value)
+    pauser.end()
     return value
   }
 }
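
To make the recording semantics concrete, a hypothetical usage of the Pauser added above (doExpensiveWork stands in for an assumed synchronous function and is not part of this commit): each recorded entry is roughly one stretch of main-thread work between two yields, and end() records the final stretch after the last item.

    const durations: number[] = []
    const pauser = new Pauser(durations)
    // map() yields before processing each item, recording the preceding chunk.
    await pauser.map(['a', 'b', 'c'], doExpensiveWork)
    // end() records the chunk after the last item as well.
    pauser.end()
    // durations now holds roughly one entry per main-thread chunk between yields.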

packages/sanity/src/core/config/uploadSchema.ts

Lines changed: 26 additions & 4 deletions
@@ -5,6 +5,7 @@ import {
 } from '@sanity/schema/_internal'
 import {type Schema} from '@sanity/types'
 import debugit from 'debug'
+import {max, sum} from 'lodash'
 import {firstValueFrom} from 'rxjs'
 
 import {isDev} from '../environment'
@@ -13,7 +14,7 @@ import {DESCRIPTOR_CONVERTER} from '../schema'
 
 const debug = debugit('sanity:config')
 
-const TOGGLE = 'toggle.schema.upload'
+const TOGGLE = 'toggle.schema.upload-pause'
 
 async function isEnabled(client: SanityClient): Promise<boolean> {
   if (typeof process !== 'undefined' && process?.env?.SANITY_STUDIO_SCHEMA_DESCRIPTOR) {
@@ -72,12 +73,24 @@ export async function uploadSchema(
   // The second step is then to actually synchronize it. This is a multi-step
   // process where it tries to synchronize as much as possible in each step.
 
+  const pauseDurations: number[] = []
   const before = performance.now()
-  const sync = await DESCRIPTOR_CONVERTER.get(schema)
+  const sync = await DESCRIPTOR_CONVERTER.get(schema, {pauseDurations})
   const after = performance.now()
+
+  const totalPause = sum(pauseDurations) || 0
+  const maxPause = max(pauseDurations) || 0
+  const avgPause = pauseDurations.length === 0 ? 0 : totalPause / pauseDurations.length
   const duration = after - before
+
   if (duration > 1000) {
-    console.warn(`Building schema for synchronization took more than 1 second (${duration}ms)`)
+    console.warn(`Building schema for synchronization took more than 1 second (${totalPause}ms)`)
+  }
+
+  if (maxPause > 100) {
+    console.warn(
+      `Building schema for synchronization blocked UI for more than 100ms (${maxPause}ms)`,
+    )
   }
 
   const descriptorId = sync.set.id
@@ -87,13 +100,22 @@
 
   const claimRequest: ClaimRequest = {descriptorId, contextKey}
 
+  const clientTimings = {
+    convertSchema: duration,
+    convertSchemaPauseTotal: totalPause,
+    convertSchemaPauseMax: maxPause,
+    convertSchemaPauseAvg: avgPause,
+  }
+
   const claimResponse = await client.request<ClaimResponse>({
     uri: '/descriptors/claim',
     method: 'POST',
     body: claimRequest,
     headers: {
       // We mirror the format of Server-Timing: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Server-Timing
-      'Client-Timing': `convertSchema;dur=${duration}`,
+      'Client-Timing': Object.entries(clientTimings)
+        .map(([name, dur]) => `${name};dur=${dur}`)
+        .join(','),
     },
   })
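
For illustration, with hypothetical timing values the joined header value would read as shown below. The metric names come from the clientTimings object above; the numbers are made up.

    // Hypothetical values, for illustration only.
    const clientTimings = {
      convertSchema: 412,
      convertSchemaPauseTotal: 388,
      convertSchemaPauseMax: 57,
      convertSchemaPauseAvg: 13,
    }
    const header = Object.entries(clientTimings)
      .map(([name, dur]) => `${name};dur=${dur}`)
      .join(',')
    // => 'convertSchema;dur=412,convertSchemaPauseTotal;dur=388,convertSchemaPauseMax;dur=57,convertSchemaPauseAvg;dur=13'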
