feat(i18n): add translation export/import scripts for production deployment

Added Scripts:
- export-translations.js: Export all translations from MongoDB to JSON
- import-translations.js: Import translations into production database

Purpose:
- Avoid re-running DeepL API on production (saves quota)
- Enable dev-to-prod translation deployment workflow
- Support dry-run and force-overwrite modes

Usage:
- Export: node scripts/export-translations.js /tmp/translations-export.json
- Import: node scripts/import-translations.js /tmp/translations-export.json

Deployment Workflow:
1. Export translations from dev
2. Deploy code to production via deploy.sh
3. Copy export file to production
4. Import translations on production

🌐 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
TheFlow 2025-10-26 01:31:59 +13:00
parent cfa57465de
commit 27963b4913
2 changed files with 252 additions and 0 deletions

View file

@ -0,0 +1,96 @@
#!/usr/bin/env node
/**
* Export Translations Script
*
* Exports all translations from the local database to a JSON file
* for deployment to production without re-running DeepL API
*
* Usage:
* node scripts/export-translations.js [output-file]
*
* Default output: /tmp/translations-export.json
*/
require('dotenv').config();
const mongoose = require('mongoose');
const fs = require('fs');
const path = require('path');
const Document = require('../src/models/Document.model');
/**
 * Export all public documents' translations to a JSON file.
 *
 * Connects to the database named by MONGODB_URI (falling back to the local
 * dev database), collects every public document that has at least one
 * translation, and writes one JSON export containing metadata plus the
 * per-document translation maps.
 *
 * Output path: argv[2], defaulting to /tmp/translations-export.json.
 * Exits with code 0 on success; errors propagate to the caller's .catch().
 */
async function main() {
  const outputFile = process.argv[2] || '/tmp/translations-export.json';
  const mongoUri = process.env.MONGODB_URI || 'mongodb://localhost:27017/tractatus_dev';
  // Document.list caps results at this limit; see truncation guard below.
  const QUERY_LIMIT = 1000;

  console.log('═══════════════════════════════════════════════════════════');
  console.log('  EXPORT TRANSLATIONS');
  console.log('═══════════════════════════════════════════════════════════\n');

  // Connect to MongoDB (fail fast after 5s if the server is unreachable)
  console.log('📡 Connecting to MongoDB...');
  await mongoose.connect(mongoUri, {
    serverSelectionTimeoutMS: 5000
  });
  console.log('✓ Connected\n');

  // Fetch all public documents; translations are embedded on each document.
  console.log('📚 Fetching documents with translations...');
  const documents = await Document.list({
    filter: { visibility: 'public' },
    limit: QUERY_LIMIT,
    sort: { order: 1 }
  });

  // Guard against silent truncation: getting back exactly QUERY_LIMIT
  // documents most likely means the collection holds more than the cap.
  if (documents.length === QUERY_LIMIT) {
    console.warn(`⚠️  WARNING: fetched exactly ${QUERY_LIMIT} documents — the export may be truncated. Raise the limit and re-run.`);
  }

  const exportData = {
    exported_at: new Date().toISOString(),
    source_database: mongoUri,
    total_documents: documents.length,
    documents: []
  };

  // Only documents that actually carry translations are exported.
  let totalTranslations = 0;
  for (const doc of documents) {
    if (doc.translations && Object.keys(doc.translations).length > 0) {
      exportData.documents.push({
        slug: doc.slug,
        _id: doc._id.toString(),
        translations: doc.translations
      });
      const langCount = Object.keys(doc.translations).length;
      totalTranslations += langCount;
      console.log(`${doc.slug}: ${langCount} translation(s)`);
    }
  }
  exportData.total_translations = totalTranslations;

  if (exportData.documents.length === 0) {
    // Not fatal, but almost certainly not what the operator intended.
    console.warn('⚠️  No documents with translations found — writing an empty export.');
  }

  // Write the export file (pretty-printed so it diffs cleanly).
  console.log(`\n💾 Writing to ${outputFile}...`);
  fs.writeFileSync(outputFile, JSON.stringify(exportData, null, 2), 'utf8');
  console.log('✓ Export complete\n');

  // Summary
  console.log('═══════════════════════════════════════════════════════════');
  console.log('  EXPORT SUMMARY');
  console.log('═══════════════════════════════════════════════════════════\n');
  console.log(`  Documents with translations: ${exportData.documents.length}`);
  console.log(`  Total translations: ${totalTranslations}`);
  console.log(`  Output file: ${outputFile}`);
  console.log(`  File size: ${(fs.statSync(outputFile).size / 1024).toFixed(2)} KB\n`);

  await mongoose.disconnect();
  console.log('✓ Database disconnected\n');
  process.exit(0);
}
// Entry point: run the export and surface any unhandled failure with a
// non-zero exit code so deployment scripts can detect it.
(async () => {
  try {
    await main();
  } catch (err) {
    console.error('\n❌ Fatal error:', err.message);
    console.error(err.stack);
    process.exit(1);
  }
})();

View file

@ -0,0 +1,156 @@
#!/usr/bin/env node
/**
* Import Translations Script
*
* Imports translations from a JSON export file into the database
* Used to deploy translations from dev to production
*
* Usage:
* node scripts/import-translations.js <input-file>
*
* Options:
* --dry-run Preview import without making changes
* --force Overwrite existing translations
*/
require('dotenv').config();
const mongoose = require('mongoose');
const fs = require('fs');
const Document = require('../src/models/Document.model');
// Parse CLI arguments: "--"-prefixed tokens are flags; the first bare token
// is the input file path.
const cliArgs = process.argv.slice(2);
const flagTokens = cliArgs.filter((token) => token.startsWith('--'));
const positionalTokens = cliArgs.filter((token) => !token.startsWith('--'));
const options = {
  dryRun: flagTokens.includes('--dry-run'),
  force: flagTokens.includes('--force'),
  inputFile: positionalTokens[0]
};
// The input file is mandatory — bail out with usage help before connecting.
if (!options.inputFile) {
  console.error('❌ ERROR: Input file required');
  console.error('Usage: node scripts/import-translations.js <input-file> [--dry-run] [--force]');
  process.exit(1);
}
/**
 * Import translations from a JSON export file into the connected database.
 *
 * Documents are matched by slug (stable across environments, unlike _id).
 * Existing translations are never overwritten unless --force is given;
 * --dry-run previews the work without writing anything.
 *
 * Exits with code 0 on success, 1 if any document failed to import.
 */
async function main() {
  console.log('═══════════════════════════════════════════════════════════');
  console.log('  IMPORT TRANSLATIONS');
  console.log('═══════════════════════════════════════════════════════════\n');
  if (options.dryRun) {
    console.log('🔍 DRY-RUN MODE - No changes will be made\n');
  }

  // Load and validate the export file before touching the database.
  console.log(`📁 Loading ${options.inputFile}...`);
  if (!fs.existsSync(options.inputFile)) {
    console.error(`❌ ERROR: File not found: ${options.inputFile}`);
    process.exit(1);
  }
  let importData;
  try {
    importData = JSON.parse(fs.readFileSync(options.inputFile, 'utf8'));
  } catch (parseError) {
    // A truncated copy or hand-edited file should fail with a clear message,
    // not an unhandled SyntaxError stack trace.
    console.error(`❌ ERROR: Could not parse ${options.inputFile}: ${parseError.message}`);
    process.exit(1);
  }
  // Fail early on a structurally invalid export instead of crashing on
  // importData.documents.length below.
  if (!importData || !Array.isArray(importData.documents)) {
    console.error('❌ ERROR: Invalid export file - missing "documents" array');
    process.exit(1);
  }
  console.log(`✓ Loaded export from ${importData.exported_at}`);
  console.log(`✓ Source: ${importData.source_database}`);
  console.log(`✓ Documents: ${importData.documents.length}`);
  console.log(`✓ Translations: ${importData.total_translations}\n`);

  // Connect to MongoDB (fail fast after 5s if the server is unreachable)
  console.log('📡 Connecting to MongoDB...');
  const mongoUri = process.env.MONGODB_URI || 'mongodb://localhost:27017/tractatus_dev';
  await mongoose.connect(mongoUri, {
    serverSelectionTimeoutMS: 5000
  });
  console.log(`✓ Connected to ${mongoUri}\n`);

  // Per-run counters; errors are collected for the summary.
  const stats = {
    total: importData.documents.length,
    imported: 0,
    skipped: 0,
    failed: 0,
    errors: []
  };

  console.log('📚 Importing translations...\n');
  // Sequential on purpose: keeps per-document log lines in a stable order.
  for (const docData of importData.documents) {
    try {
      // Find document by slug (more reliable than _id across environments)
      const doc = await Document.findBySlug(docData.slug);
      if (!doc) {
        console.log(`${docData.slug}: Document not found, skipping`);
        stats.skipped++;
        continue;
      }

      // Defensive: skip entries carrying no translations rather than
      // writing an undefined/empty payload onto the document.
      const incoming = docData.translations || {};
      const langCount = Object.keys(incoming).length;
      if (langCount === 0) {
        console.log(`${docData.slug}: No translations in export, skipping`);
        stats.skipped++;
        continue;
      }

      // Never clobber existing translations unless explicitly forced.
      const hasExisting = doc.translations && Object.keys(doc.translations).length > 0;
      if (hasExisting && !options.force) {
        console.log(`${docData.slug}: Already has translations (use --force to overwrite)`);
        stats.skipped++;
        continue;
      }

      if (options.dryRun) {
        console.log(` 🔍 ${docData.slug}: Would import ${langCount} translation(s) ${hasExisting ? '(overwrite)' : '(new)'}`);
        stats.imported++;
        continue;
      }

      // Import translations
      await Document.update(doc._id.toString(), {
        translations: incoming
      });
      console.log(`${docData.slug}: Imported ${langCount} translation(s)`);
      stats.imported++;
    } catch (error) {
      console.error(`${docData.slug}: ${error.message}`);
      stats.failed++;
      stats.errors.push({
        slug: docData.slug,
        error: error.message
      });
    }
  }

  // Summary
  console.log('\n═══════════════════════════════════════════════════════════');
  console.log('  IMPORT SUMMARY');
  console.log('═══════════════════════════════════════════════════════════\n');
  if (options.dryRun) {
    console.log('  Dry run complete - no changes were made\n');
  }
  console.log(`  Total documents: ${stats.total}`);
  console.log(`  Imported: ${stats.imported}`);
  console.log(`  Skipped: ${stats.skipped}`);
  console.log(`  Failed: ${stats.failed}\n`);
  if (stats.errors.length > 0) {
    console.log('  Errors:');
    stats.errors.forEach(err => {
      console.log(`    - ${err.slug}: ${err.error}`);
    });
    console.log('');
  }

  await mongoose.disconnect();
  console.log('✓ Database disconnected\n');
  // Non-zero exit lets deployment tooling detect partial failures.
  process.exit(stats.failed > 0 ? 1 : 0);
}
// Entry point: run the import and surface any unhandled failure with a
// non-zero exit code so deployment scripts can detect it.
(async () => {
  try {
    await main();
  } catch (err) {
    console.error('\n❌ Fatal error:', err.message);
    console.error(err.stack);
    process.exit(1);
  }
})();