下载 azure blob 流到 zip nodejs

Sid*_*har 0 zip azure node.js

我已经从 azure blob 实现了 getBlobToStream()。工作正常,但现在我想将流转换为 zip,然后下载它。我尝试了很多次但都失败了。这是我的单文件流式下载的工作代码

  // Look up the file's metadata by id; only records that are visible,
  // not canceled, not expired, not blocked, and not deleted qualify.
  fileModel.findOne({ _id: req.params.id, canceled: false, isVisible: true, linkExpired: false, isBlocked: false, isDeleted: false }).exec(async function (error, result) {
    if (error) {
      // NOTE(review): `resolve` comes from an enclosing Promise executor not
      // visible here — it is called with the error rather than rejecting;
      // verify against the surrounding code.
      resolve(error);
    } else {
      // NOTE(review): @azure/storage-blob is required but unused here — only
      // the legacy azure-storage package is actually used below.
      const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
      const storage = require('azure-storage');
      const blobService = storage.createBlobService(process.env.BLOB_ACCOUNT, process.env.BLOB_ACCOUNT_KEY); 
      // Stream the blob straight into the HTTP response, then end it in
      // either case (success or error — the error is only logged).
      blobService.getBlobToStream(result.containerName, result.blobName, res, function (error, blob) {
        if (!error) { 
          console.log(blob); 
          res.end()
        } else {
          console.log(error);
          res.end();
        }
      });
    }
  })
})
Run Code Online (Sandbox Code Playground)

帮助我将流下载到 zip。另外,告诉我这可能吗?

kri*_*shg 7

getBlobToStream 方法来自旧的 azure-storage 包。在新的 @azure/storage-blob 包中,对应的方法是 download。下面是使用 archiver 压缩下载流,并通过 fs 保存到文件 "test.zip" 的代码。


/**
 * Downloads a single blob from Azure Blob Storage and writes it into a
 * local zip archive at `zipFilePath`.
 *
 * Fill in the account/container/blob placeholders before running.
 */
async function main() {
    const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

    const STORAGE_ACCOUNT_NAME = "<your storage account name>";
    const ACCOUNT_ACCESS_KEY = "<your storage account key>";

    const containerName = "<your container name>";
    const blobName = "<your blob name>";

    const zipFilePath = "D:\\test.zip"; // a path where the output zip file would get saved

    const credentials = new StorageSharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
    const blobServiceClient = new BlobServiceClient(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, credentials);
    const containerClient = blobServiceClient.getContainerClient(containerName);
    const blobClient = containerClient.getBlobClient(blobName);

    const response = await blobClient.download(0); // download from 0 offset
    // In the Node.js SDK the readable stream is exposed as `readableStreamBody`
    // on the download response (`blobDownloadStream` is not a documented
    // property and yields undefined, which makes archiver fail).
    await streamToCompressed(response.readableStreamBody, zipFilePath, blobName);
}

/**
 * Pipes a readable stream into a zip archive written to `outputFilePath`,
 * storing it as a single entry named `blobName`.
 *
 * @param {NodeJS.ReadableStream} readableStream - source data to compress
 * @param {string} outputFilePath - destination path of the zip file
 * @param {string} blobName - entry name inside the zip
 * @returns {Promise<void>} resolves once the zip file is fully written
 */
async function streamToCompressed(readableStream, outputFilePath, blobName) {
  return new Promise((resolve, reject) => {

    const fs = require("fs");
    const archiver = require('archiver');

    // Create a file to stream archive data to.
    // To stream straight into an express HTTP response, pass `res` here
    // instead of creating a file stream.
    const output = fs.createWriteStream(outputFilePath);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });

    // 'close' fires only after the output file descriptor has closed, i.e.
    // the archive is fully on disk — only then is it safe to resolve.
    output.on('close', () => {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve();
    });

    // Surface non-blocking stat failures; fail the promise on anything else.
    // Throwing inside an event handler would crash the process instead of
    // rejecting this promise, so reject() is used.
    archive.on('warning', (err) => {
      if (err.code === 'ENOENT') {
        console.warn(err.message);
      } else {
        reject(err);
      }
    });

    // Propagate hard failures from the archiver, the output file, and the
    // source stream to the caller via rejection.
    archive.on('error', reject);
    output.on('error', reject);
    readableStream.on('error', reject);

    // pipe archive data to the file
    archive.pipe(output);

    // Append the source stream as one zip entry and finalize: no more
    // entries will be added, but the streams still have to flush.
    archive.append(readableStream, { name: blobName })
           .finalize();
  });
}

main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
Run Code Online (Sandbox Code Playground)

支持多个文件的更新:


/**
 * Downloads several blobs from Azure Blob Storage and writes them all into
 * one local zip archive at `zipFilePath`, one entry per blob.
 *
 * Fill in the account/container/blob placeholders before running.
 */
async function main() {
    const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

    const STORAGE_ACCOUNT_NAME = "<your storage account name>";
    const ACCOUNT_ACCESS_KEY = "<your storage account key>";

    const containerName = "<your container name>";
    const blobNames = [ "blob 1 name", "blob 2 name" ];

    const zipFilePath = "D:\\test.zip";

    const credentials = new StorageSharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
    const blobServiceClient = new BlobServiceClient(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, credentials);
    const containerClient = blobServiceClient.getContainerClient(containerName);

    // Map of blobName -> readable stream; insertion order decides zip entry order.
    const streamDict = {};

    // for...of over the values (not for...in over indices); sequential awaits
    // are deliberate so entries land in the zip in the given order.
    for (const blobName of blobNames) {
      const blobClient = containerClient.getBlobClient(blobName);
      const response = await blobClient.download(0); // download from 0 offset
      // `readableStreamBody` is the documented Node.js stream on the
      // download response (`blobDownloadStream` is undefined).
      streamDict[blobName] = response.readableStreamBody;
    }

    await streamsToCompressed(streamDict, zipFilePath);
}

/**
 * Compresses several readable streams into one zip file at `outputFilePath`,
 * one entry per key of `streamDict` (key = entry name, value = stream).
 *
 * @param {Object<string, NodeJS.ReadableStream>} streamDict - entry name -> stream
 * @param {string} outputFilePath - destination path of the zip file
 * @returns {Promise<void>} resolves once the zip file is fully written
 */
async function streamsToCompressed(streamDict, outputFilePath) {
  return new Promise((resolve, reject) => {

    const fs = require("fs");
    const archiver = require('archiver');

    // Create a file to stream archive data to.
    // To stream straight into an express HTTP response, pass `res` here
    // instead of creating a file stream.
    const output = fs.createWriteStream(outputFilePath);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });

    // BUG FIX: resolve only when the output file descriptor has closed.
    // The original resolved immediately after finalize(), before the zip
    // had actually been written to disk.
    output.on('close', () => {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve();
    });

    // Surface non-blocking stat failures; fail the promise on anything else.
    // Throwing inside an event handler would crash the process instead of
    // rejecting this promise, so reject() is used.
    archive.on('warning', (err) => {
      if (err.code === 'ENOENT') {
        console.warn(err.message);
      } else {
        reject(err);
      }
    });

    // Propagate hard failures from the archiver and the output file.
    archive.on('error', reject);
    output.on('error', reject);

    // pipe archive data to the file
    archive.pipe(output);

    // Append each stream as its own zip entry.
    for (const blobName in streamDict) {
      const readableStream = streamDict[blobName];
      readableStream.on('error', reject);
      archive.append(readableStream, { name: blobName });
    }

    // Done appending; the streams still have to flush — completion is
    // signalled by the 'close' handler above.
    archive.finalize();
  });
}

main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
Run Code Online (Sandbox Code Playground)