I use the Shopify API to listen for events when a new product is created. When I receive a webhook, I retrieve the following JSON data:
{
"admin_graphql_api_id": "gid://shopify/Product/8867427221826",
"body_html": "<strong>Thshirt in verschiedenen farben</strong>",
"created_at": "2023-10-14T10:51:06-04:00",
"handle": "t-shirt",
"id": 8867427221826,
"product_type": "",
"published_at": "2023-10-14T10:51:06-04:00",
"template_suffix": "",
"title": "T-shirt",
"updated_at": "2023-10-14T10:51:08-04:00",
"vendor": "crystelorbs",
"status": "active",
"published_scope": "global",
"tags": "",
"variants": [
{
"admin_graphql_api_id": "gid://shopify/ProductVariant/47150807122242",
"barcode": "",
"compare_at_price": null,
"created_at": "2023-10-14T10:51:06-04:00",
"fulfillment_service": "manual",
"id": 47150807122242,
"inventory_management": "shopify",
"inventory_policy": "deny",
"position": 1,
"price": "29.95",
"product_id": 8867427221826,
"sku": "321dsfase1231321w",
"taxable": true,
"title": "34 / Blau",
"updated_at": "2023-10-14T10:51:06-04:00",
"option1": "34",
"option2": "Blau",
"option3": null,
"grams": 0,
"image_id": null,
"weight": 0,
"weight_unit": "kg",
"inventory_item_id": 49189935579458,
"inventory_quantity": 0,
"old_inventory_quantity": 0,
"requires_shipping": true
},
{
"admin_graphql_api_id": "gid://shopify/ProductVariant/47150807187778",
"barcode": "",
"compare_at_price": null,
"created_at": "2023-10-14T10:51:07-04:00",
"fulfillment_service": "manual",
"id": 47150807187778,
"inventory_management": "shopify",
"inventory_policy": "deny",
"position": 2,
"price": "29.95",
"product_id": 8867427221826,
"sku": "321dsfase1231321w-2",
"taxable": true,
"title": "33 / Blau",
"updated_at": "2023-10-14T10:51:07-04:00",
"option1": "33",
"option2": "Blau",
"option3": null,
"grams": 0,
"image_id": null,
"weight": 0,
"weight_unit": "kg",
"inventory_item_id": 49189935644994,
"inventory_quantity": 0,
"old_inventory_quantity": 0,
"requires_shipping": true
},
{
"admin_graphql_api_id": "gid://shopify/ProductVariant/47150807220546",
"barcode": "",
"compare_at_price": null,
"created_at": "2023-10-14T10:51:07-04:00",
"fulfillment_service": "manual",
"id": 47150807220546,
"inventory_management": "shopify",
"inventory_policy": "deny",
"position": 3,
"price": "29.95",
"product_id": 8867427221826,
"sku": "321dsfase1231321w-3",
"taxable": true,
"title": "12 / Blau",
"updated_at": "2023-10-14T10:51:07-04:00",
"option1": "12",
"option2": "Blau",
"option3": null,
"grams": 0,
"image_id": null,
"weight": 0,
"weight_unit": "kg",
"inventory_item_id": 49189935677762,
"inventory_quantity": 0,
"old_inventory_quantity": 0,
"requires_shipping": true
}
],
"options": [
{
"name": "Größe",
"id": 11148870222146,
"product_id": 8867427221826,
"position": 1,
"values": [
"34",
"33",
"12"
]
},
{
"name": "Farbe",
"id": 11148870254914,
"product_id": 8867427221826,
"position": 2,
"values": [
"Blau"
]
}
],
"images": [
{
"id": 43049062433090,
"position": 1,
"product_id": 8867427221826,
"width": 650,
"height": 300,
"alt": null,
"src": "https://cdn.shopify.com/s/files/1/0757/6479/3666/files/ggg-modified_6dcaf2fa-d307-4ee7-bdf1-c7a9dbeefd62.png?v=1697295067",
"created_at": "2023-10-14T10:51:07-04:00",
"updated_at": "2023-10-14T10:51:07-04:00",
"admin_graphql_api_id": "gid://shopify/ProductImage/43049062433090",
"variant_ids": []
}
],
"image": {
"id": 43049062433090,
"position": 1,
"product_id": 8867427221826,
"width": 650,
"height": 300,
"alt": null,
"src": "https://cdn.shopify.com/s/files/1/0757/6479/3666/files/ggg-modified_6dcaf2fa-d307-4ee7-bdf1-c7a9dbeefd62.png?v=1697295067",
"created_at": "2023-10-14T10:51:07-04:00",
"updated_at": "2023-10-14T10:51:07-04:00",
"admin_graphql_api_id": "gid://shopify/ProductImage/43049062433090",
"variant_ids": []
}
}
I create the product with transactions. For example, a product may have 50 images and 30 variant values, which require a loop of 50x client.query('INSERT INTO ....').
Would using a for loop for each image in an array be a bad idea since Node.js is single-threaded?
I heard about worker threads, but I use a queue system. Do I have to use worker threads when I use a queue system?
Code:
/**
 * Persists a Shopify product (with its media and options) inside a single
 * database transaction. Any failure rolls the whole transaction back and a
 * 500-style ReturnQuery is produced; the connection is always released.
 *
 * @param product - Shopify product webhook payload.
 * @param shop_id - UUID of the shop that owns the product.
 * @returns A ReturnQuery describing success or failure.
 */
export const CreateProduct = async ({product, shop_id}: { product: _Product; shop_id: UUID; }): Promise<ReturnQuery> => {
    const client = await pool.connect();
    try {
        const product_options_data: {
            p_o_id: number;
            values: string[];
        }[] = [];
        await client.query('BEGIN');
        // RETURNING id is required: without it node-postgres returns zero
        // rows and Product.rows[0] is undefined, so the .id access below
        // would throw and roll back every call.
        const Product: QueryResult = await client.query(`
            INSERT INTO product
                (info, name, status, shop_id)
            VALUES
                ($1, $2, $3, $4)
            RETURNING id;
        `, [product.body_html, product.title, product.status, shop_id]);
        const p_id = Product.rows[0].id;

        // Batch all images into ONE multi-row INSERT instead of one
        // round-trip per image — keeps the transaction short even with
        // 50 images, without any worker threads.
        if (product?.images?.length) {
            const params: unknown[] = [];
            const placeholders = product.images
                .map((img, i) => {
                    params.push(p_id, img.src, img.alt, img.position);
                    const o = i * 4;
                    return `($${o + 1}, $${o + 2}, $${o + 3}, $${o + 4})`;
                })
                .join(', ');
            await client.query(
                `INSERT INTO product_media (p_id, src, alt, s_position) VALUES ${placeholders};`,
                params
            );
        }

        if (product?.options?.length) {
            for (const option of product.options) {
                if (option.values) {
                    // RETURNING id needed here too, for the same reason as above.
                    const p_o_id: QueryResult = await client.query(
                        'INSERT INTO product_options (p_id, name, position) VALUES ($1, $2, $3) RETURNING id;',
                        [p_id, option.name, option.position]
                    );
                    product_options_data.push({
                        p_o_id: p_o_id.rows[0].id,
                        values: option.values as string[]
                    });
                }
            }
        }
        // NOTE(review): product_options_data is collected but never used in
        // this function — presumably option-value/variant inserts follow
        // elsewhere; verify this isn't dead code.
        await client.query('COMMIT');
        // The original fell through here and resolved to undefined, which
        // violates the declared Promise<ReturnQuery> return type. Report
        // success explicitly so callers can rely on the contract.
        return {
            message: 'Product created',
            statusCode: 201,
            query: 'SHOPIFY_CREATE_PRODUCT'
        };
    } catch (e) {
        await client.query('ROLLBACK');
        console.log(e);
        Sentry.captureException(e);
        return {
            message: ErrorMessageQuery,
            statusCode: 500,
            query: 'SHOPIFY_CREATE_PRODUCT'
        };
    } finally {
        client.release();
    }
};
I am wondering if there are any ways to improve my code. I have heard that it's recommended to make transactions small, but in cases where a product has an array of 50 images or many variants, I have to loop through all of them and add them to my database. Can you provide me with some helpful tips to make this solution better?
You're using
await
for each INSERT. That's good. You're not blocking the JavaScript event loop while doing all this work, so other actions on your server will proceed normally while this action is processed, even if it takes a while. And you're doing the operation inside a transaction (
BEGIN
/COMMIT
). That is great for the performance of your DBMS. It looks to me like you did this right.
If you have a big volume of this stuff you could refactor this operation into something done from a work queue by a service process. But that's a big change and probably unnecessary unless your name is WAL or MART.