Axios get a file from URL and upload to s3

I’m trying to get files from a site using axios.get, and then uploading it directly to S3. However, the files are corrupted or not encoded properly, and can’t be opened after upload. File types range from .jpg, .png to .pdf. Here is my code:

axios.get(URL, {
  // BUG: these two options are the cause of the corruption described above.
  // responseType 'document' (and any text responseType) makes axios decode
  // the body as a string, which mangles raw binary bytes (.jpg/.png/.pdf).
  responseEncoding: 'binary',
  responseType: 'document',
}).then((response) => {
  return new Promise((resolve, reject) => {
    const s3Bucket = nconf.get('AWS_S3_BUCKET');

    s3.upload({
      'ACL': 'public-read',
      // response.data is already text-decoded here, so the uploaded object
      // no longer matches the original file's bytes.
      'Body': response.data,
      'Bucket': s3Bucket,
      'Key': `static/${filePath}/${fileManaged.get('filename')}`,
    }, function(err) {
      if (err) {
        return reject(err);
      }
      // NOTE: resolve() is never called, so this promise never settles
      // on success — callers awaiting it will hang.
    });
  });
});

I’ve tried modifying responseType to arraybuffer and creating a buffer using Buffer.from(response.data, 'binary').toString('base64'), to no avail. What am I missing?

Here is Solutions:

We have several solutions to this problem, but we recommend using the first one because it is a tried-and-tested solution that should work for you.

Solution 1

I was able to get it working by using an arraybuffer and the .putObject function instead of .upload

// Fetch the file as raw bytes: responseType 'arraybuffer' stops axios from
// text-decoding the body, which is what corrupts binary files.
axios.get(encodeURI(url), {
  responseType: 'arraybuffer',
}).then((response) => {
  // putObject writes the untouched bytes straight to S3.
  s3.putObject({
    'ACL': 'public-read',
    'Body': response.data,
    'Bucket': s3Bucket,
    'Key': `static/${filePath}/${fileManaged.get('filename')}`,
  }, function(err) {
    if (err) {
      console.error('S3 putObject failed:', err);
    }
  });
}).catch((err) => {
  console.error('Download failed:', err);
});

Solution 2

Axios decodes the response body as UTF-8 text by default, which corrupts binary data.
You could use another library, such as request, instead.

Solution 3

The response from John Xu is correct, but in my case I had to add Buffer.from(image.data, 'utf8') as stated above in order to get a correct buffer, similar to a request response. Here is my code:

const AWS = require('aws-sdk');
const axios = require('axios');

/**
 * uploadFile            saves a file buffer into an S3-compatible bucket
 * @param {*} s3_creds    credentials/config object passed to the AWS.S3 constructor
 * @param {*} fullname    absolute path and file name (object key) to be written
 * @param {*} filecontent buffer with the raw file bytes
 * @param {*} filetype    MIME type stored as the object's Content-Type
 * @param {*} bucket      target bucket name (defaults to "docserpcloud")
 * @returns {Promise<*>}  resolves with the putObject response data, rejects on error
 */
var uploadFile = async function (s3_creds, fullname, filecontent, filetype, bucket = "docserpcloud") {
    const s3 = new AWS.S3(s3_creds);
    const params = {
        Key: fullname, // absolute path of the file
        Body: filecontent,
        Bucket: bucket,
        ACL: "public-read", // or private
        // NOTE: no ContentEncoding here. 'binary' is not a valid HTTP
        // content-coding (valid values are gzip/deflate/br/identity) and
        // setting it can make clients mis-handle the downloaded object.
        ContentType: filetype
    };
    // new Promise is only used to adapt the SDK v2 callback API.
    return new Promise((resolve, reject) => {
        s3.putObject(params, function (err, data) {
            if (err) {
                console.log(err, err.stack);
                reject(err);
            } else {
                resolve(data);
            }
        });
    });
};

/**
 * getFilefromURL        downloads a file from a URL as raw, undecoded bytes
 * @param {*} imageuri   full URL of the file to fetch
 * @returns {Promise<*>} resolves with the full axios response (data + headers);
 *                       rejects if the request fails
 */
var getFilefromURL = async function (imageuri) {
    // 'arraybuffer' keeps the body as raw bytes; any text responseType would
    // corrupt binary files. The previous try/catch around a .then() chain
    // could never catch an async rejection and had no .catch, so failures
    // were silently dropped — awaiting lets errors propagate to the caller.
    const response = await axios.get(encodeURI(imageuri), {
        responseType: "arraybuffer"
    });
    return response;
};


/**
 * saveFileFromUrl       gets a file from a URL and saves a copy on an S3 bucket
 * @param {*} s3_creds   credentials/config object passed to the AWS.S3 constructor
 * @param {*} imageuri   full URL to the source file
 * @param {*} fullname   absolute path and filename of the object written to S3
 * @returns {Promise<*>} resolves with the putObject response data
 */
var saveFileFromUrl = async function (s3_creds, imageuri, fullname) {
    const image = await getFilefromURL(imageuri);
    // image.data is already raw bytes (arraybuffer response). Buffer.from
    // ignores an encoding argument for ArrayBuffer input, so passing 'utf8'
    // was misleading at best — wrap the bytes directly.
    const body = Buffer.from(image.data);
    // Preserve the original Content-Type reported by the origin server.
    return uploadFile(s3_creds, fullname, body, image.headers['content-type']);
};


module.exports = {
    uploadFile: uploadFile,
    getFilefromURL: getFilefromURL,
    saveFileFromUrl: saveFileFromUrl
}

 async function main() {
     try {
         var s3_creds = {
             "accessKeyId": "acessid",
             "endpoint": "xxxx.digitaloceanspaces.com",
             "secretAccessKey": "Vttkia0....."
         };
         await saveFileFromUrl(s3_creds, "https://gitlab.com/qtree/erpcloud_logos/-/raw/master/pdf_logo2.png?inline=true", 'media/pddd.png');
     } catch {}
 }

main();

Update s3_creds to fit your credentials and run the script to upload the PDF logo image.

Regards,
Enrique

Note: Use and implement Solution 1, because this method has been fully tested on our system.
Thank you 🙂

All methods was sourced from stackoverflow.com or stackexchange.com, is licensed under cc by-sa 2.5, cc by-sa 3.0 and cc by-sa 4.0

Leave a Reply