Rocky_Mountain_Vending/scripts/upload-to-r2.js

319 lines
8.9 KiB
JavaScript
Raw Permalink Blame History

This file contains invisible Unicode characters

This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#!/usr/bin/env node
/**
* Upload manuals assets to any S3-compatible object storage (Cloudflare R2, MinIO, etc.)
*
* Usage:
* node scripts/upload-to-r2.js --type manuals --dry-run
* node scripts/upload-to-r2.js --type thumbnails
* node scripts/upload-to-r2.js --type all
*
* Environment variables required:
* S3_ENDPOINT_URL or CLOUDFLARE_R2_ENDPOINT
* AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY (or CLOUDFLARE_* variants)
*/
import {
S3Client,
PutObjectCommand,
ListObjectsV2Command,
HeadObjectCommand,
} from "@aws-sdk/client-s3"
import { readdir, stat, readFile } from "fs/promises"
import { join, relative, dirname } from "path"
import { existsSync } from "fs"
const ACCOUNT_ID =
process.env.CLOUDFLARE_ACCOUNT_ID || "bd6f76304a840ba11b75f9ced84264f4"
const ENDPOINT =
process.env.MANUALS_STORAGE_ENDPOINT ||
process.env.S3_ENDPOINT_URL ||
process.env.CLOUDFLARE_R2_ENDPOINT ||
`https://${ACCOUNT_ID}.r2.cloudflarestorage.com`
const ACCESS_KEY_ID =
process.env.MANUALS_STORAGE_ACCESS_KEY_ID ||
process.env.CLOUDFLARE_R2_ACCESS_KEY_ID ||
process.env.AWS_ACCESS_KEY_ID ||
process.env.AWS_ACCESS_KEY
const SECRET_ACCESS_KEY =
process.env.MANUALS_STORAGE_SECRET_ACCESS_KEY ||
process.env.CLOUDFLARE_R2_SECRET_ACCESS_KEY ||
process.env.AWS_SECRET_ACCESS_KEY ||
process.env.AWS_SECRET_KEY
const REGION =
process.env.MANUALS_STORAGE_REGION || process.env.AWS_REGION || "us-east-1"
const FORCE_PATH_STYLE = ["1", "true", "yes", "on"].includes(
String(
process.env.MANUALS_STORAGE_FORCE_PATH_STYLE ||
process.env.AWS_S3_FORCE_PATH_STYLE ||
"true"
).toLowerCase()
)
// Bucket names
const MANUALS_BUCKET = process.env.R2_MANUALS_BUCKET || "vending-vm-manuals"
const THUMBNAILS_BUCKET =
process.env.R2_THUMBNAILS_BUCKET || "vending-vm-thumbnails"
// Source directories (relative to project root)
const MANUALS_DATA_ROOT =
process.env.MANUALS_DATA_ROOT ||
(existsSync(join(process.cwd(), "..", "manuals-data"))
? join(process.cwd(), "..", "manuals-data")
: "/Users/matthewcall/Documents/VS Code Projects/Rocky Mountain Vending/manuals-data")
const MANUALS_SOURCE = join(MANUALS_DATA_ROOT, "manuals")
const THUMBNAILS_SOURCE = join(MANUALS_DATA_ROOT, "thumbnails")
// Parse command line arguments
const args = process.argv.slice(2)
const typeArg =
args.find((arg) => arg.startsWith("--type="))?.split("=")[1] ||
(args.includes("--type") ? args[args.indexOf("--type") + 1] : null)
const uploadType = typeArg || "all"
const dryRun = args.includes("--dry-run") || args.includes("-d")
const incremental = args.includes("--incremental") || args.includes("-i")
if (!ACCESS_KEY_ID || !SECRET_ACCESS_KEY || !ENDPOINT) {
console.error(
"❌ Error: S3-compatible manuals storage env vars are incomplete"
)
console.error(
" Set S3_ENDPOINT_URL plus AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY (or CLOUDFLARE_* equivalents)"
)
process.exit(1)
}
// Initialize S3 client
const s3Client = new S3Client({
region: REGION,
endpoint: ENDPOINT,
forcePathStyle: FORCE_PATH_STYLE,
credentials: {
accessKeyId: ACCESS_KEY_ID,
secretAccessKey: SECRET_ACCESS_KEY,
},
})
/**
* Check if object exists in R2
*/
async function objectExists(bucket, key) {
try {
await s3Client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }))
return true
} catch (error) {
if (error.name === "NotFound" || error.$metadata?.httpStatusCode === 404) {
return false
}
throw error
}
}
/**
* Get content type based on file extension
*/
function getContentType(filename) {
const ext = filename.split(".").pop()?.toLowerCase()
const types = {
pdf: "application/pdf",
jpg: "image/jpeg",
jpeg: "image/jpeg",
png: "image/png",
webp: "image/webp",
gif: "image/gif",
}
return types[ext] || "application/octet-stream"
}
/**
* Upload a single file to R2
*/
async function uploadFile(bucket, key, filePath, dryRun = false) {
if (dryRun) {
console.log(` [DRY RUN] Would upload: ${key}`)
return { uploaded: false, skipped: false, key }
}
try {
const fileContent = await readFile(filePath)
const contentType = getContentType(key)
await s3Client.send(
new PutObjectCommand({
Bucket: bucket,
Key: key,
Body: fileContent,
ContentType: contentType,
})
)
return { uploaded: true, skipped: false, key }
} catch (error) {
console.error(` ❌ Error uploading ${key}:`, error.message)
return { uploaded: false, skipped: false, key, error: error.message }
}
}
/**
* Recursively get all files from a directory
*/
async function getAllFiles(dir, baseDir = dir) {
const files = []
const entries = await readdir(dir, { withFileTypes: true })
for (const entry of entries) {
const fullPath = join(dir, entry.name)
if (entry.isDirectory()) {
const subFiles = await getAllFiles(fullPath, baseDir)
files.push(...subFiles)
} else {
const relativePath = relative(baseDir, fullPath)
files.push({ fullPath, relativePath })
}
}
return files
}
/**
* Upload directory to R2 bucket
*/
async function uploadDirectory(
sourceDir,
bucket,
bucketName,
dryRun = false,
incremental = false
) {
if (!existsSync(sourceDir)) {
console.error(`❌ Source directory does not exist: ${sourceDir}`)
return { uploaded: 0, skipped: 0, failed: 0, total: 0 }
}
console.log(`\n📁 Scanning: ${sourceDir}`)
const files = await getAllFiles(sourceDir)
console.log(` Found ${files.length} files`)
if (files.length === 0) {
console.log(" No files to upload")
return { uploaded: 0, skipped: 0, failed: 0, total: 0 }
}
let uploaded = 0
let skipped = 0
let failed = 0
console.log(
`\n${dryRun ? "🔍 [DRY RUN]" : "⬆️ "} Uploading to bucket: ${bucketName}`
)
for (let i = 0; i < files.length; i++) {
const { fullPath, relativePath } = files[i]
const key = relativePath.replace(/\\/g, "/") // Normalize path separators
// Check if file exists (incremental mode)
if (incremental && !dryRun) {
const exists = await objectExists(bucket, key)
if (exists) {
skipped++
if ((i + 1) % 50 === 0) {
process.stdout.write(
`\r Progress: ${i + 1}/${files.length} (${uploaded} uploaded, ${skipped} skipped, ${failed} failed)`
)
}
continue
}
}
const result = await uploadFile(bucket, key, fullPath, dryRun)
if (result.uploaded) {
uploaded++
} else if (result.error) {
failed++
} else if (result.skipped) {
skipped++
}
// Progress indicator
if ((i + 1) % 10 === 0 || i + 1 === files.length) {
process.stdout.write(
`\r Progress: ${i + 1}/${files.length} (${uploaded} uploaded, ${skipped} skipped, ${failed} failed)`
)
}
}
console.log("\n")
return { uploaded, skipped, failed, total: files.length }
}
/**
* Main upload function
*/
async function main() {
console.log("🚀 Manuals Object Storage Upload Script")
console.log(` Endpoint: ${ENDPOINT}`)
console.log(` Mode: ${dryRun ? "DRY RUN" : "UPLOAD"}`)
console.log(` Incremental: ${incremental ? "Yes" : "No"}`)
console.log(` Type: ${uploadType}`)
const results = {
manuals: { uploaded: 0, skipped: 0, failed: 0, total: 0 },
thumbnails: { uploaded: 0, skipped: 0, failed: 0, total: 0 },
}
if (uploadType === "manuals" || uploadType === "all") {
console.log("\n📚 Uploading manuals...")
results.manuals = await uploadDirectory(
MANUALS_SOURCE,
MANUALS_BUCKET,
MANUALS_BUCKET,
dryRun,
incremental
)
}
if (uploadType === "thumbnails" || uploadType === "all") {
console.log("\n🖼 Uploading thumbnails...")
results.thumbnails = await uploadDirectory(
THUMBNAILS_SOURCE,
THUMBNAILS_BUCKET,
THUMBNAILS_BUCKET,
dryRun,
incremental
)
}
// Summary
console.log("\n📊 Upload Summary:")
if (uploadType === "manuals" || uploadType === "all") {
console.log(
` Manuals: ${results.manuals.uploaded} uploaded, ${results.manuals.skipped} skipped, ${results.manuals.failed} failed (${results.manuals.total} total)`
)
}
if (uploadType === "thumbnails" || uploadType === "all") {
console.log(
` Thumbnails: ${results.thumbnails.uploaded} uploaded, ${results.thumbnails.skipped} skipped, ${results.thumbnails.failed} failed (${results.thumbnails.total} total)`
)
}
const totalUploaded = results.manuals.uploaded + results.thumbnails.uploaded
const totalFailed = results.manuals.failed + results.thumbnails.failed
if (totalFailed > 0) {
console.log(`\n⚠️ ${totalFailed} file(s) failed to upload`)
process.exit(1)
} else if (dryRun) {
console.log(
"\n✅ Dry run completed. Remove --dry-run to actually upload files."
)
} else {
console.log(`\n✅ Successfully uploaded ${totalUploaded} file(s) to R2`)
}
}
main().catch((error) => {
console.error("\n❌ Fatal error:", error)
process.exit(1)
})