0.3.0 • Published 2 years ago
@prpl/plugin-aws v0.3.0
@prpl/plugin-aws
A plugin for PRPL for working with AWS S3. Useful if you would rather have your content files stored in S3 instead of checked in under version control.
Dependencies
@prpl/plugin-aws relies on one dependency: aws-sdk.
Requirements
For this plugin to work, you must have:
Usage
Security recommendations:
- Do not hardcode secrets passed into this plugin's exports
- Do not check in any file (e.g., .env) containing secrets under version control
Fetch from S3
// Example: fetch content files from an S3 bucket, then build with PRPL core.
const dotenv = require('dotenv');
const { fetchFromS3 } = require('@prpl/plugin-aws');
const { interpolate } = require('@prpl/core');

// Load environment variables from .env
dotenv.config();

// Destructure environment variables
const {
  AWS_ACCESS_KEY: AWSAccessKey,
  AWS_SECRET_ACCESS_KEY: AWSSecretAccessKey,
  AWS_CONTENT_BUCKET: AWSContentBucket,
  AWS_CONTENT_BUCKET_REGION: AWSContentBucketRegion
} = process.env;

// Define our arguments
const keys = {
  AWSAccessKey,
  AWSSecretAccessKey,
  AWSContentBucket,
  AWSContentBucketRegion
};

// Relative to project root, will use Node's path.resolve under the hood
const targetDir = 'content';

// Define an async function because top-level await is only available in ECMAScript modules
async function build() {
  // Fetch content from S3 and write it to the local file system
  await fetchFromS3(keys, targetDir);
  // Interpolate with PRPL core
  await interpolate();
}

build();
Upload to S3
This function accepts an array of files, so you can upload one or many files as needed for your use case.
// Example: upload local content files to an S3 bucket.
const dotenv = require('dotenv');
const { uploadToS3 } = require('@prpl/plugin-aws');
// NOTE: PRPLCachePartitionKey and PRPLContentFileExtension are used below but
// were never imported in the original example — they come from @prpl/core.
const {
  generateOrRetrieveFileSystemTree,
  PRPLCachePartitionKey,
  PRPLContentFileExtension
} = require('@prpl/core');
const { resolve } = require('path');

// Load environment variables from .env
dotenv.config();

// Destructure environment variables
const {
  AWS_ACCESS_KEY: AWSAccessKey,
  AWS_SECRET_ACCESS_KEY: AWSSecretAccessKey,
  AWS_CONTENT_BUCKET: AWSContentBucket,
  AWS_CONTENT_BUCKET_REGION: AWSContentBucketRegion
} = process.env;

// Define our arguments
const keys = {
  AWSAccessKey,
  AWSSecretAccessKey,
  AWSContentBucket,
  AWSContentBucketRegion
};

// Not required, but we will use this PRPL core lib function to take advantage of cached content files
const { children: files = [] } = generateOrRetrieveFileSystemTree({
  partitionKey: PRPLCachePartitionKey.content,
  entityPath: resolve('content'),
  readFileRegExp: new RegExp(`${PRPLContentFileExtension.html}|${PRPLContentFileExtension.markdown}`)
});

// Define an async function because top-level await is only available in ECMAScript modules
async function upload() {
  // Upload files from local `content` directory to S3 bucket
  await uploadToS3(keys, files);
}

upload();
0.3.0
2 years ago
0.2.12
2 years ago
0.2.11
2 years ago
0.2.10
2 years ago
0.2.7
2 years ago
0.2.9
2 years ago
0.2.8
2 years ago
0.2.6
3 years ago
0.2.5
3 years ago
0.2.4
3 years ago
0.2.3
3 years ago
0.2.2
3 years ago
0.2.1
3 years ago
0.2.0
3 years ago
0.1.7
3 years ago
0.1.4
3 years ago
0.1.3
3 years ago
0.1.6
3 years ago
0.1.5
3 years ago
0.1.2
3 years ago
0.1.1
3 years ago
0.1.0
3 years ago