RethinkDB: Update multiple array entries using ReQL with high performance


Inside a single document of my table, I'm trying to update several entries of an array with high performance.

The array has around 20k entries, and I'm trying to merge changes into about 5k of them. Here's an example:

"entities": [
    {
        "debug_name": {
            "value": "Entity 1"
        },
        "euid": {
            "value": "9fcde834-f5ca-4b98-885d-c000365bd921"
        },
        "local_transform": {
            "orientation": [
                -0.7070000171661377,
                0,
                0,
                0.7070000171661377
            ],
            "position": [
                0,
                0,
                0
            ],
            "scale": [
                1,
                1,
                1
            ]
        }
    },
    {
        "debug_name": {
            "value": "Default (IfcSite)"
        },
        "euid": {
            "value": "9fcde834-f5ca-4b98-885d-c000365bd923"
        },
        "lineage": {
            "parentUUID": "9fcde834-f5ca-4b98-885d-c000365bd921"
        },
        "local_transform": {
            "orientation": [
                0,
                0,
                0,
                1
            ],
            "position": [
                0,
                0,
                0
            ],
            "scale": [
                1,
                1,
                1
            ]
        }
    },
    [.....]
]

Each entry is identified by its "euid": {"value"} field, and my goal is to merge new values into some of these entries.

My first approach was:

r.db('desc')
.table('scene')
.get(sceneUUID)
.update(function(row)
{
    // componentMap is a plain JS object (see below), converted to a ReQL
    // expression so it can be used server-side.
    const cMap = r.expr(componentMap);

    return {
        // Rebuild the whole entities array: merge the matching componentMap
        // entry (keyed by euid.value) into each entity, or keep it unchanged.
        entities : row('entities').map(entity =>
        {
            const euid  = entity('euid')('value');

            return r.branch(
                 cMap.hasFields(euid),
                 entity.merge(cMap.getField(euid)),
                 entity
            );
        })
    };
})
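
For context, this is roughly how the query is executed — a minimal sketch assuming Node.js and the official rethinkdb driver, with placeholder connection settings:

const r = require('rethinkdb');

async function mergeComponents(sceneUUID, componentMap) {
    // Placeholder connection settings.
    const conn = await r.connect({ host: 'localhost', port: 28015 });
    try {
        const cMap = r.expr(componentMap);
        return await r.db('desc')
            .table('scene')
            .get(sceneUUID)
            .update(row => ({
                entities: row('entities').map(entity =>
                    r.branch(
                        cMap.hasFields(entity('euid')('value')),
                        entity.merge(cMap.getField(entity('euid')('value'))),
                        entity
                    )
                )
            }))
            .run(conn);
    } finally {
        await conn.close();
    }
}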

componentMap is an object used as a map, keyed by euid.value, that references the new values to merge into the matching entries.

Example:

{
    "32204602-445c-453f-b94a-5f31bb6858ea": {
        "material_ref": {
            "value":"c22177c3-32d7-453b-bfb3-bdc80f6de01e",
            "faceCulling":0
        }
    },
    [.....]
 }
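
To make the intent concrete, an entity whose euid.value matches a key in componentMap should come out of the update with the referenced component merged in, along these lines (the UUIDs here are only illustrative):

{
    "debug_name": {
        "value": "Entity 1"
    },
    "euid": {
        "value": "32204602-445c-453f-b94a-5f31bb6858ea"
    },
    "local_transform": {
        [.....]
    },
    "material_ref": {
        "value": "c22177c3-32d7-453b-bfb3-bdc80f6de01e",
        "faceCulling": 0
    }
}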

This query works, but it takes more than 5 minutes when submitting a componentMap of 5k entries against an array of 18,163 elements. During those 5 minutes the document cannot be read.

My questions are:

  • How could I optimize this query (for example, is something like the client-side merge sketched below the way to go)?

  • Would sharding or scaling my database improve the performance of this query?
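
For example, would reading the entities once, merging them client-side, and writing the whole array back in a single update be expected to scale better? A rough sketch of what I mean — note that Object.assign is a shallow merge (unlike ReQL's recursive merge) and that this is no longer a single atomic operation:

const r = require('rethinkdb');

async function mergeComponentsClientSide(conn, sceneUUID, componentMap) {
    // Read the entities array once.
    const entities = await r.db('desc')
        .table('scene')
        .get(sceneUUID)('entities')
        .run(conn);

    // Merge in plain JavaScript (shallow merge per entity, which is enough
    // when componentMap only adds whole components such as material_ref).
    const merged = entities.map(entity => {
        const patch = componentMap[entity.euid.value];
        return patch ? Object.assign({}, entity, patch) : entity;
    });

    // Write the whole array back in one update, so the server stores a
    // literal value instead of branching per element.
    return r.db('desc')
        .table('scene')
        .get(sceneUUID)
        .update({ entities: merged })
        .run(conn);
}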
