bulkWrite not working (returns 0 count on bulk insert)

Hi Team,

I’m trying to do bulk insertions into the DB (around 1731 entries) using the following cloud function code.

Moralis.Cloud.define("bulkInsertData", async (request) => {
  return await Moralis.bulkWrite("Test_Bulk_Data", request.params.bulkData);
});

and I’m getting an insert count of 0 in the response, like this:

data: {
    result: {
      result: [Object],
      insertedCount: 0,
      matchedCount: 0,
      modifiedCount: 0,
      deletedCount: 0,
      upsertedCount: 0,
      upsertedIds: {},
      insertedIds: {},
      n: 0
    }
  }

The bulkData passed to the cloud function is in the proper format, like this:

[{update: {"name" : "Apple", "color" : "green"},
                     {update: {"name" : "Orange", "color" : "orange"}]

Could you please assist me with inserting bulk data into the DB?

Thanks.

I’m also having trouble with this function. Can anyone confirm that it is working for them?

I would expect it to work. What code are you using?

Essentially I’m just trying to save a full 10k NFT collection to the database.

// Make subsequent calls to the Moralis Web3 API using the updated cursor
// until all NFTs are loaded
let allNFTs = []; // master array for all pages of results
let cursor = null;
do {
  const NFTs = await Moralis.Web3API.token.getAllTokenIds({
    address: collectionAddress,
    limit: 100,
    cursor: cursor,
    chain: chain,
  });
  // If results are found, save them to the master array
  if (
    NFTs.result !== undefined &&
    NFTs.result !== null &&
    NFTs.result.length !== 0
  ) {
    allNFTs = allNFTs.concat(NFTs.result);
    // Update the cursor for the next page
    cursor = NFTs.cursor;
  } else {
    // No more results; clear the cursor to end the loop
    cursor = null;
  }
  // Brief pause between requests to stay under rate limits
  await new Promise((resolve) => setTimeout(resolve, 100));
} while (cursor != "" && cursor != null);

/* ...
   Data manipulation that takes a rather long time
... */
let nftArr = allNFTs;
let arrayToWrite = [];
for (let i = 0; i < nftArr.length; i++) {
  arrayToWrite.push({
    update: {
      name: nftArr[i].name,
      token_id: nftArr[i].token_id,
      token_address: nftArr[i].token_address,
      token_hash: nftArr[i].token_hash,
      token_uri: nftArr[i].token_uri,
      symbol: nftArr[i].symbol,
      contract_type: nftArr[i].contract_type,
      amount: nftArr[i].amount,
      image: nftArr[i].image,
      metadata: nftArr[i].metadata,
      synced_at: nftArr[i].synced_at,
      last_token_uri_sync: nftArr[i].last_token_uri_sync,
      last_metadata_sync: nftArr[i].last_metadata_sync,
      block_number_minted: nftArr[i].block_number_minted,
    },
  });
}
await Moralis.bulkWrite(
  "Collection_" + chain + "_" + collectionAddress,
  arrayToWrite
);

The server logs blank out and it seems like the server restarts. I have tried individual saves too, and I get about 300-1700 saves before it crashes.

Here is my code for individual saves:

for (let i = 0; i < nftArr.length; i++) {
 if (i % 100 === 0) {
    logger.info("checkpoint #" + i);
    await new Promise((resolve) => setTimeout(resolve, 1000));
 }
let newObject = new Moralis.Object.extend("Collection"+address);
            newObject.set("name", nftArr[i].name);
            newObject.set("attributes", nftArr[i].Attributes);
            newObject.set("token_id", nftArr[i].token_id);
            newObject.set("token_address", nftArr[i].token_address);
            newObject.set("token_hash", nftArr[i].token_hash);
            newObject.set("token_uri", nftArr[i].token_uri);
            newObject.set("symbol", nftArr[i].symbol);
            newObject.set("contract_type", nftArr[i].contract_type);
            newObject.set("amount", nftArr[i].amount);
            newObject.set("image", nftArr[i].image);
            newObject.set("metadata", nftArr[i].metadata);
            newObject.set("synced_at", nftArr[i].synced_at);
            newObject.set("last_token_uri_sync", nftArr[i].last_token_uri_sync);
            newObject.set("last_metadata_sync", nftArr[i].last_metadata_sync);
            newObject.set("block_number_minted", nftArr[i].block_number_minted);
            await newObject.save(null, { useMasterKey: true });
}
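
One approach that may reduce round trips, assuming Moralis.Object.saveAll behaves like Parse’s Parse.Object.saveAll (Moralis v1 is built on the Parse SDK), is to build the objects first and save them in batches; a sketch (BATCH_SIZE is an assumption to tune):

// Hedged sketch: batched saves instead of one request per object.
const Collection = Moralis.Object.extend("Collection" + address);
const BATCH_SIZE = 200; // assumption; tune for your server
for (let i = 0; i < nftArr.length; i += BATCH_SIZE) {
  const batch = nftArr.slice(i, i + BATCH_SIZE).map((nft) => {
    const obj = new Collection();
    obj.set("name", nft.name);
    obj.set("token_id", nft.token_id);
    // ...set the remaining fields as in the loop above...
    return obj;
  });
  await Moralis.Object.saveAll(batch, { useMasterKey: true });
  logger.info("saved batch starting at #" + i);
}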

Logs look like this:

2022-06-08T04:35:25.988Z - Ran cloud function coreservices_getOrganization for user undefined with:
  Input: {}
  Result: {"status":200,"data":{"success":true,"result":{"id":"314","speedyTokenSignature":"11d7f2f3df81a28b1584c35c100d7953192c384fe102a497984753f6597aa92b"}}}
2022-06-08T04:35:25.968Z - Initialized Function plugin loadPlugins
2022-06-08T04:35:25.904Z - Ran cloud function checkSessionToken for user 7OrxxQJoHAdiFE4lampn0EcH with:
  Input: {}
  Result: true
2022-06-08T04:35:25.863Z - Plugin coreservices is healthy.
2022-06-08T04:35:25.830Z - Migration 0.0.1 Applied Correctly
2022-06-08T04:35:23.780Z - Parse LiveQuery Server starts running
2022-06-08T04:34:43.234Z - checkpoint #400
2022-06-08T04:34:05.970Z - checkpoint #300
2022-06-08T04:34:03.089Z - checkpoint #200
2022-06-08T04:33:59.423Z - checkpoint #100
2022-06-08T04:33:55.948Z - checkpoint #0

No errors are reported in the logs, although I have try/catch blocks around all my operations.

Is there maybe a timeout limit on cloud functions that we are hitting with large save operations? Or a limit on saves per second? Either way, surely both cases should throw an error rather than just crash the server.

The server could crash/restart if it doesn’t have enough memory.
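
If memory is the limiting factor, writing in smaller chunks may help; a minimal sketch (CHUNK_SIZE is an assumption, and arrayToWrite is the array built above):

// Hedged sketch: split one huge bulkWrite into smaller chunks.
const CHUNK_SIZE = 500; // assumption; tune for your server
for (let i = 0; i < arrayToWrite.length; i += CHUNK_SIZE) {
  const chunk = arrayToWrite.slice(i, i + CHUNK_SIZE);
  await Moralis.bulkWrite(
    "Collection_" + chain + "_" + collectionAddress,
    chunk
  );
}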

You can also try connecting directly to MongoDB and adding the data there.
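
A minimal sketch of that approach using the official mongodb Node driver (the connection URI, database name, and insertDirect helper are placeholders for illustration, not values from this thread):

// Hedged sketch: inserting documents directly with the MongoDB driver.
// Note: docs here are plain objects, not the { update: ... } wrappers
// that bulkWrite expects.
const { MongoClient } = require("mongodb");

async function insertDirect(docs) {
  const client = new MongoClient("mongodb://<user>:<password>@<host>:<port>");
  try {
    await client.connect();
    const coll = client.db("<db-name>").collection("Test_Bulk_Data");
    const res = await coll.insertMany(docs);
    console.log("insertedCount:", res.insertedCount);
  } finally {
    await client.close();
  }
}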