// fp/packages/scout/src/s3.js
// Load env vars from the repo-root .env file.
// NOTE(review): this path is relative to the process cwd, not this file —
// running from a different directory will silently skip the .env; confirm.
import dotenv from 'dotenv'
dotenv.config({
path: '../../.env'
})
// ESM static imports are hoisted, so the AWS SDK imports below actually
// execute before dotenv.config(); that is harmless here because env vars
// are only read lazily inside uploadFile() and the guards below.
import { S3Client } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
// import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { createId } from '@paralleldrive/cuid2' // collision-resistant id for S3 keys
import { basename } from 'node:path'
import fs from 'node:fs'
// Fail fast at import time if required configuration is missing.
if (!process.env.S3_BUCKET_NAME) throw new Error('S3_BUCKET_NAME was undefined in env');
if (!process.env.SCOUT_NITTER_URL) throw new Error('SCOUT_NITTER_URL was undefined in env');
/**
 * Uploads a local file to the configured Backblaze B2 (S3-compatible) bucket
 * using a multipart upload via @aws-sdk/lib-storage.
 *
 * The object key is prefixed with a fresh cuid2 so that two uploads of
 * same-named files never collide.
 *
 * @param {string} filePath - Path of the local file to upload.
 * @returns {Promise<object>} The completed-upload response from the SDK.
 * @throws {Error} If filePath is missing, the credential env vars are unset,
 *   or the upload fails (original error is preserved on `cause`).
 */
export async function uploadFile(filePath) {
  if (!filePath) throw new Error("first argument, 'filePath' is undefined");
  // Fail fast with a clear message instead of an opaque SDK auth error.
  if (!process.env.S3_BUCKET_KEY_ID) throw new Error('S3_BUCKET_KEY_ID was undefined in env');
  if (!process.env.S3_BUCKET_APPLICATION_KEY) throw new Error('S3_BUCKET_APPLICATION_KEY was undefined in env');
  const client = new S3Client({
    endpoint: 'https://s3.us-west-000.backblazeb2.com',
    region: 'us-west-000',
    credentials: {
      accessKeyId: process.env.S3_BUCKET_KEY_ID,
      secretAccessKey: process.env.S3_BUCKET_APPLICATION_KEY,
    },
  });
  const target = {
    Bucket: process.env.S3_BUCKET_NAME,
    Key: `${createId()}-${basename(filePath)}`,
    Body: fs.createReadStream(filePath),
  };
  // greets https://stackoverflow.com/a/70159394/1004931
  try {
    const parallelUploads3 = new Upload({
      client,
      //tags: [...], // optional tags
      queueSize: 4, // optional concurrency configuration
      leavePartsOnError: false, // optional manually handle dropped parts
      params: target,
    });
    // parallelUploads3.on("httpUploadProgress", (progress) => {
    //   console.log(progress);
    // });
    return await parallelUploads3.done();
  } catch (e) {
    console.error(`while uploading a file to s3, we encountered an error`)
    // Wrap with `cause` so the original error and its stack survive;
    // the previous `throw new Error(e)` stringified the error into the
    // message and discarded the stack trace.
    throw new Error('s3 upload failed', { cause: e });
  }
}