# @manantest/s3-uploader
A robust Node.js package for effortlessly uploading files to Amazon S3, supporting single file uploads, parallel uploads, and large file uploads (>5GB).
## Installation

```bash
npm install @manantest/s3-uploader
```

## Quick Start
```js
const s3Uploader = require('@manantest/s3-uploader');

// Your encrypted AWS credentials
const config = {
  encryptedAccessKey: 'your-encrypted-access-key',
  encryptedSecretKey: 'your-encrypted-secret-key',
  encryptionSecret: 'your-encryption-secret',
  bucketName: 'your-bucket-name',
  region: 'us-east-1' // optional
};
```

## Examples
### 1. Simple File Upload
```js
const fs = require('fs');

async function uploadFile() {
  try {
    const fileBuffer = fs.readFileSync('path/to/file.jpg');
    const result = await s3Uploader.uploadToS3({
      ...config,
      key: 'folder/file.jpg',
      fileBuffer
    });
    console.log('Upload successful:', result.signedUrl);
  } catch (error) {
    console.error('Upload failed:', error.message);
  }
}
```

### 2. Large File Upload (>5GB)
```js
async function uploadLargeFile() {
  try {
    const result = await s3Uploader.createMultipartUpload({
      ...config,
      key: 'folder/large-file.zip',
      filePath: 'path/to/large-file.zip'
    });
    console.log('Large file upload successful:', result.signedUrl);
  } catch (error) {
    console.error('Multipart upload failed:', error.message);
  }
}
```

### 3. Parallel Upload of Multiple Files
```js
async function uploadMultipleFiles() {
  const files = [
    { path: 'path/to/file1.jpg', key: 'images/file1.jpg' },
    { path: 'path/to/file2.pdf', key: 'docs/file2.pdf' }
  ];

  try {
    const result = await s3Uploader.uploadMultipleFiles({
      ...config,
      files
    });
    console.log('All uploads completed:', result.results);
    if (result.errors) {
      console.log('Some files failed:', result.errors);
    }
  } catch (error) {
    console.error('Batch upload failed:', error.message);
  }
}
```

## API Reference
### uploadToS3(options)
Upload a single file to S3.
**Parameters:**

- `options` (Object)
  - `encryptedAccessKey` (String): Encrypted AWS access key
  - `encryptedSecretKey` (String): Encrypted AWS secret key
  - `encryptionSecret` (String): Secret used to decrypt the AWS credentials
  - `bucketName` (String): S3 bucket name
  - `key` (String): S3 object key/path
  - `fileBuffer` (Buffer): File content
  - `region` (String, optional): AWS region
**Returns:** `Promise<{ success: boolean, signedUrl: string }>`
### createMultipartUpload(options)
Upload large files using multipart upload.
**Parameters:**

- `options` (Object)
  - `encryptedAccessKey` (String): Encrypted AWS access key
  - `encryptedSecretKey` (String): Encrypted AWS secret key
  - `encryptionSecret` (String): Secret used to decrypt the AWS credentials
  - `bucketName` (String): S3 bucket name
  - `key` (String): S3 object key/path
  - `filePath` (String): Local file path
  - `region` (String, optional): AWS region
**Returns:** `Promise<{ success: boolean, signedUrl: string }>`
### uploadMultipleFiles(options)
Upload multiple files in parallel.
**Parameters:**

- `options` (Object)
  - `files` (Array): Array of `{ path: string, key: string }`
  - `encryptedAccessKey` (String): Encrypted AWS access key
  - `encryptedSecretKey` (String): Encrypted AWS secret key
  - `encryptionSecret` (String): Secret used to decrypt the AWS credentials
  - `bucketName` (String): S3 bucket name
  - `region` (String, optional): AWS region
**Returns:**

```ts
Promise<{
  success: boolean,
  results: Array<{ file: string, success: boolean, signedUrl: string }>,
  errors?: Array<{ file: string, error: string }>
}>
```

## Error Handling
The package provides detailed error messages for common scenarios:
```js
try {
  await s3Uploader.uploadToS3(/* ... */);
} catch (error) {
  switch (error.message) {
    case 'Missing required parameter: encryptedAccessKey':
      // Handle missing credentials
      break;
    case 'Invalid or missing bucketName parameter':
      // Handle invalid bucket name
      break;
    case 'File does not exist at specified path':
      // Handle missing file
      break;
    default:
      // Handle other errors
  }
}
```

## Best Practices
### Credential Security
- Never store unencrypted AWS credentials in your code
- Use environment variables for encryption secrets (see the sketch after this list)
- Rotate credentials regularly
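
For example, the encryption secret and other settings can be read from the environment instead of being hard-coded. A minimal sketch — the environment variable names below are illustrative, not defined by this package:

```js
// Build the uploader config from environment variables so no
// secrets live in source control. Variable names are hypothetical.
const config = {
  encryptedAccessKey: process.env.S3_UPLOADER_ENCRYPTED_ACCESS_KEY,
  encryptedSecretKey: process.env.S3_UPLOADER_ENCRYPTED_SECRET_KEY,
  encryptionSecret: process.env.S3_UPLOADER_ENCRYPTION_SECRET,
  bucketName: process.env.S3_UPLOADER_BUCKET,
  region: process.env.AWS_REGION || 'us-east-1'
};
```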
### Error Handling
- Always wrap API calls in try/catch blocks
- Implement proper error logging
- Handle retries for network issues (a retry sketch follows this list)
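
Retries can be added with a small wrapper around any of the upload calls. The `withRetry` helper below is a sketch, not part of this package's API; the attempt count and backoff values are arbitrary:

```js
// Hypothetical helper: retry a failing upload with exponential backoff.
async function withRetry(fn, attempts = 3, baseDelayMs = 500) {
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === attempts - 1) throw error; // out of retries, rethrow
      const delay = baseDelayMs * 2 ** i;
      console.warn(`Upload failed (${error.message}), retrying in ${delay}ms`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

// Usage:
// const result = await withRetry(() =>
//   s3Uploader.uploadToS3({ ...config, key: 'folder/file.jpg', fileBuffer })
// );
```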
### Performance
- Use parallel uploads for multiple small files
- Use multipart upload for files >5GB (a size-based dispatch sketch follows this list)
- Monitor memory usage with large files
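
One way to apply the size guideline is to pick the upload function based on the file's size on disk. This is a sketch under the assumptions above; `smartUpload` is not part of the package, and in practice you may want a much lower threshold, since buffering a multi-gigabyte file can exhaust memory or exceed Node's Buffer size limit:

```js
const fs = require('fs');

const FIVE_GB = 5 * 1024 * 1024 * 1024;

// Hypothetical dispatcher: buffer upload for small files,
// multipart upload (by file path) for files over 5GB.
async function smartUpload(filePath, key) {
  const { size } = fs.statSync(filePath);
  if (size > FIVE_GB) {
    return s3Uploader.createMultipartUpload({ ...config, key, filePath });
  }
  const fileBuffer = fs.readFileSync(filePath);
  return s3Uploader.uploadToS3({ ...config, key, fileBuffer });
}
```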
## License
ISC
## Author
Formidium