| endpoint | bulk |
|---|---|
| lang | javascript |
| es_version | 9.3 |
| client | @elastic/elasticsearch@9.3.0 |
Use client.helpers.bulk to index multiple documents in a single
operation. The helper manages chunking, retries, and back-pressure
automatically.
// Sample catalog to index: one plain object per product document.
const catalog = [
  { name: "Espresso Machine Pro", brand: "BrewMaster", price: 899.99, category: "appliances", in_stock: true, rating: 4.7 },
  { name: "Noise-Cancelling Headphones", brand: "SoundCore", price: 249.00, category: "electronics", in_stock: true, rating: 4.5 },
  { name: "Ergonomic Standing Desk", brand: "DeskCraft", price: 599.00, category: "furniture", in_stock: false, rating: 4.8 },
  { name: "4K Webcam with Mic", brand: "StreamGear", price: 129.99, category: "electronics", in_stock: true, rating: 4.3 },
  { name: "Cast Iron Dutch Oven", brand: "HearthStone", price: 79.95, category: "cookware", in_stock: true, rating: 4.9 },
  { name: "Mechanical Keyboard", brand: "TypeForce", price: 169.00, category: "electronics", in_stock: true, rating: 4.6 },
  { name: "Air Purifier HEPA-13", brand: "CleanAir", price: 349.00, category: "appliances", in_stock: true, rating: 4.4 },
  { name: "Bamboo Cutting Board Set", brand: "HearthStone", price: 34.99, category: "cookware", in_stock: true, rating: 4.2 },
];

// The helper chunks these documents into bulk requests for us.
const result = await client.helpers.bulk({
  datasource: catalog,
  // Every document becomes an index action targeting the "products" index.
  onDocument: () => ({ index: { _index: "products" } }),
});
console.log(`Indexed ${result.successful} of ${result.total} documents`);

The datasource accepts an array, an async generator, or a readable
stream. The onDocument callback determines the action for each document
and must return an operation object.
Use onDrop to capture documents that fail after all retries are
exhausted:
const result = await client.helpers.bulk({
datasource: products,
onDocument(doc) {
return { index: { _index: "products" } };
},
onDrop(doc) {
console.error(`Failed to index: ${JSON.stringify(doc.document)}`);
console.error(` Reason: ${doc.error?.reason}`);
},
});

Adjust flushBytes, concurrency, and retry settings to balance
throughput against cluster pressure:
const result = await client.helpers.bulk({
datasource: products,
onDocument(doc) {
return { index: { _index: "products" } };
},
flushBytes: 1_000_000,
concurrency: 3,
retries: 3,
wait: 5000,
});

For streamed or paginated data, pass an async generator as the datasource to keep memory usage bounded:
/**
 * Yield products one at a time from a paginated HTTP API.
 *
 * Pages are fetched lazily, so memory stays bounded regardless of how
 * many products the API holds. Iteration stops at the first empty page.
 *
 * @returns {AsyncGenerator<object>} one product object per iteration
 * @throws {Error} when the API responds with a non-2xx status
 */
async function* fetchProducts() {
  let page = 0;
  while (true) {
    const res = await fetch(`https://api.example.com/products?page=${page}`);
    // Fail loudly on HTTP errors instead of feeding an error page to
    // res.json(), which would throw a confusing parse error or, worse,
    // yield garbage documents into the bulk stream.
    if (!res.ok) {
      throw new Error(`products API returned ${res.status} for page ${page}`);
    }
    const batch = await res.json();
    if (batch.length === 0) break;
    yield* batch;
    page++;
  }
}
// Stream documents straight out of the generator: only one page of API
// results is materialized in memory at any moment.
const result = await client.helpers.bulk({
  datasource: fetchProducts(),
  onDocument: () => ({ index: { _index: "products" } }),
});