How do I batch delete with DynamoDB?

Dav*_*ein 9 node.js amazon-dynamodb

I'm getting the error "The provided key element does not match the schema". uuid is my primary partition key. I also have a primary sort key, version. I figured I could use batchWrite (docs) to delete all items with the same uuid.

My ES6 code is below:

delete(uuid) {
  const params = {
    RequestItems: {
      [this.TABLE]: [
        {
          DeleteRequest: {
            Key: { uuid: uuid }
          }
        }
      ]
    }
  };

  // this._client references the DocumentClient
  return new Promise((resolve, reject) => {
    this._client.batchWrite(params, function(err, data) {
      if (err) {
        // this callback gets hit with the error
        console.log(err);
        return reject(err);
      }

      console.log(data);
      resolve(data);
    });
  });
}

I'm not sure why it complains about the primary key. I've seen posts about needing an additional index when searching on something that isn't a key, but I don't believe that's the case here.
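
For context: because the table described above has a composite primary key (partition key uuid plus sort key version), every DeleteRequest Key in a BatchWriteItem call has to supply both attributes, and a Key holding only uuid produces exactly this schema error. A minimal sketch of a params shape that would match that schema (the version values are placeholders for illustration):

// Sketch only: each DeleteRequest Key must name both the partition key and
// the sort key on a composite-key table. The version values are placeholders.
const params = {
  RequestItems: {
    [this.TABLE]: [
      { DeleteRequest: { Key: { uuid: uuid, version: 1 } } },
      { DeleteRequest: { Key: { uuid: uuid, version: 2 } } }
    ]
  }
};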

not*_*est 12

Here is a batch write delete request example. This code has been tested and works. If you adapt it to your own table, it should work.

Table definition:

Bag - table name

bag - hash key

No sort key on the 'Bag' table
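
For completeness, here is a minimal sketch of creating a table matching this definition with the plain DynamoDB client; the region, endpoint, and throughput values are placeholders mirroring the example below:

var AWS = require("aws-sdk");

AWS.config.update({
    region : "us-west-2",
    endpoint : "http://localhost:8000"
});

var dynamodb = new AWS.DynamoDB();

// 'Bag' has only a hash (partition) key called 'bag'; no sort key is defined.
var tableParams = {
    TableName : 'Bag',
    KeySchema : [
        { AttributeName : 'bag', KeyType : 'HASH' }
    ],
    AttributeDefinitions : [
        { AttributeName : 'bag', AttributeType : 'S' }
    ],
    ProvisionedThroughput : {
        ReadCapacityUnits : 5,
        WriteCapacityUnits : 5
    }
};

dynamodb.createTable(tableParams, function(err, data) {
    if (err) console.log(err, err.stack);
    else console.log('Created table', data.TableDescription.TableName);
});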

Batch write code:

var AWS = require("aws-sdk");

// This example points at a local DynamoDB endpoint; adjust region/endpoint for your environment.
AWS.config.update({
    region : "us-west-2",
    endpoint : "http://localhost:8000"
});

var documentclient = new AWS.DynamoDB.DocumentClient();

var itemsArray = [];

var item1 = {
    DeleteRequest : {
        Key : {
            'bag' : 'b1'    
        }
    }
};

itemsArray.push(item1);

var item2 = {
    DeleteRequest : {
        Key : {
            'bag' : 'b2'    
        }
    }
};

itemsArray.push(item2);

var params = {
    RequestItems : {
        'Bag' : itemsArray
    }
};
documentclient.batchWrite(params, function(err, data) {
    if (err) {
        console.log('Batch delete unsuccessful ...');
        console.log(err, err.stack); // an error occurred
    } else {
        console.log('Batch delete successful ...');
        console.log(data); // successful response
    }

});

Output:

Batch delete successful ...
{ UnprocessedItems: {} }

  • Note that BatchWriteItem is limited to 25 put or delete requests per call: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html (3 upvotes)
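
Related to that limit: even a batch of 25 or fewer requests can come back only partially processed (for example under throttling), and batchWrite then returns the leftovers in UnprocessedItems. A rough sketch of retrying them, reusing the documentclient from the answer above (a production version would add exponential backoff and a retry cap):

// Sketch only: re-submits whatever DynamoDB reports back in UnprocessedItems.
// Assumes 'documentclient' and a 'params' object shaped like the example above.
function batchWriteWithRetry(params, callback) {
    documentclient.batchWrite(params, function(err, data) {
        if (err) return callback(err);
        if (data.UnprocessedItems && Object.keys(data.UnprocessedItems).length > 0) {
            // UnprocessedItems has the same shape as RequestItems, so it can be resent as-is.
            return batchWriteWithRetry({ RequestItems : data.UnprocessedItems }, callback);
        }
        callback(null, data);
    });
}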

小智 5

This is entirely possible; you're best off using a Node Lambda, and there are a few things to consider for large tables:

const AWS = require("aws-sdk"),
  dynamodb = new AWS.DynamoDB.DocumentClient(),
  log = console.log;

exports.handler = async (event) => {
  log(event);

  var TableName = event.tableName,
    // Scan for every item belonging to the given user; a scan reads the
    // whole table, with results filtered by userId.
    params = {
      TableName,
      FilterExpression: "userId = :uid",
      ExpressionAttributeValues: {
        ":uid": event.userId,
      },
    },
    // Pages through the scan via LastEvaluatedKey and collects the
    // primary-key attribute 'id' of every matching item.
    getItems = async (lastKey, items) => {
      if (lastKey) params.ExclusiveStartKey = lastKey;
      var resp = await dynamodb.scan(params).promise();
      items = resp.Items.length
        ? items.concat(resp.Items.map((x) => x.id))
        : items;
      if (resp.LastEvaluatedKey)
        return await getItems(resp.LastEvaluatedKey, items);
      else return items;
    },
    ids = await getItems(null, []),
    idGroups = [];

  // BatchWriteItem accepts at most 25 requests per call, so split the ids
  // into groups of 25.
  for (var i = 0; i < ids.length; i += 25) {
    idGroups.push(ids.slice(i, i + 25));
  }

  // Issue one batchWrite of DeleteRequests per group of 25 ids.
  for (var gs of idGroups) {
    var delReqs = [];
    for (var id of gs) {
      delReqs.push({ DeleteRequest: { Key: { id } } });
    }
    var RequestItems = {};
    RequestItems[TableName] = delReqs;
    var d = await dynamodb
      .batchWrite({
        RequestItems,
      })
      .promise()
      .catch((e) => log(e));
    //log(d)
  }
  log(ids.length + " items processed");
  return {};
};
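
A hypothetical local invocation of the handler above, assuming it is saved as index.js and that the target table's primary key is the single attribute id (the file name, table name, and user id are placeholders):

// Hypothetical smoke test; './index', 'MyTable' and 'some-user-id' are placeholders.
const { handler } = require("./index");

handler({ tableName: "MyTable", userId: "some-user-id" })
  .then((res) => console.log("done", res))
  .catch(console.error);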
