Utility Scripts
This document details all utility scripts and automation tools used in the STO Education Platform.

Scripts Directory Structure
Copy
scripts/
├── generate-sitemap.js # Comprehensive sitemap generation
├── generate-sitemap-simple.js # Simple static sitemap generation
├── update-version.js # Version update automation
├── db-migration-runner.js # Database migration runner
├── asset-optimizer.js # Asset optimization script
├── deployment-check.js # Pre-deployment validation
└── backup-generator.js # Database backup generation
Sitemap Generation Scripts
Comprehensive Sitemap Generator
File: scripts/generate-sitemap.js
Copy
const fs = require('fs')
const path = require('path')
const { createClient } = require('@supabase/supabase-js')
/**
 * Builds public/sitemap.xml from the static route list plus dynamic pages
 * fetched from Supabase (courses, teacher profiles, blog posts).
 * Exits the process with code 1 on any failure (CI-friendly).
 */
async function generateComprehensiveSitemap() {
  const baseUrl = process.env.VITE_APP_URL || 'https://sto-education.com'
  try {
    // Fail fast with an actionable message instead of letting createClient
    // throw a generic error when the credentials are undefined.
    const supabaseUrl = process.env.VITE_SUPABASE_URL
    const supabaseKey = process.env.VITE_SUPABASE_ANON_KEY
    if (!supabaseUrl || !supabaseKey) {
      throw new Error('VITE_SUPABASE_URL and VITE_SUPABASE_ANON_KEY must be set')
    }
    const supabase = createClient(supabaseUrl, supabaseKey)
    // Static marketing/legal routes that always appear in the sitemap.
    const staticPages = [
      { url: '/', priority: '1.0', changefreq: 'daily' },
      { url: '/about', priority: '0.8', changefreq: 'monthly' },
      { url: '/courses', priority: '0.9', changefreq: 'weekly' },
      { url: '/teachers', priority: '0.8', changefreq: 'weekly' },
      { url: '/contact', priority: '0.6', changefreq: 'monthly' },
      { url: '/privacy', priority: '0.4', changefreq: 'yearly' },
      { url: '/terms', priority: '0.4', changefreq: 'yearly' }
    ]
    // Dynamic routes from the database (best-effort; see fetchDynamicPages).
    const dynamicPages = await fetchDynamicPages(supabase)
    const allPages = [...staticPages, ...dynamicPages]
    const sitemap = generateSitemapXML(allPages, baseUrl)
    const outputPath = path.join(__dirname, '../public/sitemap.xml')
    fs.writeFileSync(outputPath, sitemap)
    console.log(`✅ Sitemap generated successfully with ${allPages.length} pages`)
    console.log(`📁 Output: ${outputPath}`)
  } catch (error) {
    console.error('❌ Error generating sitemap:', error)
    process.exit(1)
  }
}
/**
 * Collects sitemap entries for database-backed content: published courses,
 * teacher profiles, and published blog posts.
 *
 * Per-table query errors are deliberately ignored (a failed query yields
 * null data and simply contributes no entries); only unexpected throws
 * reach the catch below, which downgrades them to a warning.
 *
 * @param {object} supabase - initialized Supabase client
 * @returns {Promise<Array<{url: string, priority: string, changefreq: string, lastmod: string}>>}
 */
async function fetchDynamicPages(supabase) {
  const entries = []
  try {
    const { data: courses } = await supabase
      .from('courses')
      .select('id, slug, updated_at')
    for (const course of courses ?? []) {
      entries.push({
        url: `/courses/${course.slug}`,
        priority: '0.8',
        changefreq: 'weekly',
        lastmod: course.updated_at
      })
    }
    const { data: teachers } = await supabase
      .from('profiles')
      .select('id, username, updated_at')
      .eq('role', 'teacher')
    for (const teacher of teachers ?? []) {
      entries.push({
        url: `/teachers/${teacher.username}`,
        priority: '0.7',
        changefreq: 'weekly',
        lastmod: teacher.updated_at
      })
    }
    const { data: blogPosts } = await supabase
      .from('blog_posts')
      .select('id, slug, updated_at')
      .eq('published', true)
    for (const post of blogPosts ?? []) {
      entries.push({
        url: `/blog/${post.slug}`,
        priority: '0.6',
        changefreq: 'monthly',
        lastmod: post.updated_at
      })
    }
  } catch (err) {
    console.warn('⚠️ Warning: Could not fetch dynamic pages:', err.message)
  }
  return entries
}
/**
 * Serializes page descriptors into sitemap-protocol XML.
 * Pages without a lastmod fall back to the current timestamp.
 *
 * @param {Array<{url: string, priority: string, changefreq: string, lastmod?: string}>} pages
 * @param {string} baseUrl - origin prepended to every relative URL
 * @returns {string} complete sitemap XML document
 */
function generateSitemapXML(pages, baseUrl) {
  const entryFor = (page) => {
    const lastmod = page.lastmod || new Date().toISOString()
    return [
      '',
      '<url>',
      `<loc>${baseUrl}${page.url}</loc>`,
      `<lastmod>${lastmod}</lastmod>`,
      `<changefreq>${page.changefreq}</changefreq>`,
      `<priority>${page.priority}</priority>`,
      '</url>'
    ].join('\n')
  }
  const body = pages.map(entryFor).join('')
  return `<?xml version="1.0" encoding="UTF-8"?>\n<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n${body}\n</urlset>`
}
// Run if called directly (CLI usage: `node scripts/generate-sitemap.js`).
// A rejection cannot escape here: generateComprehensiveSitemap wraps its
// entire body in try/catch and exits the process on failure.
if (require.main === module) {
generateComprehensiveSitemap()
}
// Also exported for programmatic use from other build tooling.
module.exports = { generateComprehensiveSitemap }
Simple Sitemap Generator
File: scripts/generate-sitemap-simple.js
Copy
const fs = require('fs')
const path = require('path')
/**
 * Writes a sitemap containing only the static routes to public/sitemap.xml.
 * No database access — a fast fallback to the comprehensive generator.
 */
function generateSimpleSitemap() {
  const baseUrl = process.env.VITE_APP_URL || 'https://sto-education.com'
  // Static routes only; priorities mirror each page's importance to crawlers.
  const pages = [
    { url: '/', priority: '1.0', changefreq: 'daily' },
    { url: '/courses', priority: '0.9', changefreq: 'weekly' },
    { url: '/teachers', priority: '0.8', changefreq: 'weekly' },
    { url: '/about', priority: '0.7', changefreq: 'monthly' },
    { url: '/contact', priority: '0.6', changefreq: 'monthly' },
    { url: '/privacy', priority: '0.4', changefreq: 'yearly' },
    { url: '/terms', priority: '0.4', changefreq: 'yearly' }
  ]
  const outputPath = path.join(__dirname, '../public/sitemap.xml')
  fs.writeFileSync(outputPath, generateXML(pages, baseUrl))
  console.log(`✅ Simple sitemap generated with ${pages.length} pages`)
  console.log(`📁 Output: ${outputPath}`)
}
/**
 * Serializes page descriptors into sitemap-protocol XML.
 * Unlike the comprehensive generator, no <lastmod> element is emitted.
 *
 * @param {Array<{url: string, priority: string, changefreq: string}>} pages
 * @param {string} baseUrl - origin prepended to every relative URL
 * @returns {string} complete sitemap XML document
 */
function generateXML(pages, baseUrl) {
  let body = ''
  for (const { url, priority, changefreq } of pages) {
    body += `\n<url>\n<loc>${baseUrl}${url}</loc>\n<priority>${priority}</priority>\n<changefreq>${changefreq}</changefreq>\n</url>`
  }
  return `<?xml version="1.0" encoding="UTF-8"?>\n<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n${body}\n</urlset>`
}
// Run if called directly (CLI usage: `node scripts/generate-sitemap-simple.js`).
// generateSimpleSitemap is synchronous, so errors surface as normal throws.
if (require.main === module) {
generateSimpleSitemap()
}
// Exported for programmatic use.
module.exports = { generateSimpleSitemap }
Version Management Scripts
Version Update Script
File: scripts/update-version.js
Copy
const fs = require('fs')
const path = require('path')
/**
 * Bumps the semver version in package.json and propagates it to other
 * version-bearing files.
 *
 * @param {'major'|'minor'|'patch'} [type='patch'] - bump kind; anything else
 *   falls through to a patch bump (switch default), matching CLI leniency.
 * @throws {Error} if the current version is not plain MAJOR.MINOR.PATCH.
 */
function updateVersion(type = 'patch') {
  const packageJsonPath = path.join(__dirname, '../package.json')
  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'))
  const parts = packageJson.version.split('.').map(Number)
  // Guard against non-numeric segments (e.g. "1.2.3-beta" parses its patch
  // to NaN), which previously produced a silent "1.2.NaN" version string.
  if (parts.length !== 3 || parts.some(Number.isNaN)) {
    throw new Error(`Unsupported version format: "${packageJson.version}" (expected MAJOR.MINOR.PATCH)`)
  }
  const [major, minor, patch] = parts
  let newVersion
  switch (type) {
    case 'major':
      newVersion = `${major + 1}.0.0`
      break
    case 'minor':
      newVersion = `${major}.${minor + 1}.0`
      break
    case 'patch':
    default:
      newVersion = `${major}.${minor}.${patch + 1}`
      break
  }
  packageJson.version = newVersion
  // Trailing newline matches npm's own package.json formatting, keeping
  // diffs clean after `npm version`/`npm install` rewrites.
  fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n')
  console.log(`✅ Version updated to ${newVersion}`)
  // Propagate the new version to non-package.json locations.
  updateVersionInFiles(newVersion)
}
/**
 * Rewrites the first JSON-style `"version": "x.y.z"` field in each known
 * version-bearing file. Missing files are skipped silently; read/write
 * failures are downgraded to warnings.
 *
 * @param {string} version - the new semver string to write
 */
function updateVersionInFiles(version) {
  const filesToUpdate = [
    '../public/version.json',
    '../src/constants/version.ts'
  ]
  filesToUpdate.forEach(file => {
    const filePath = path.join(__dirname, file)
    if (!fs.existsSync(filePath)) {
      return
    }
    try {
      const content = fs.readFileSync(filePath, 'utf8')
      const updated = content.replace(/"version":\s*"[^"]*"/, `"version": "${version}"`)
      if (updated === content) {
        // Previously an unmatched pattern rewrote the file unchanged and still
        // logged success. NOTE(review): version.ts may not use JSON-style
        // double-quoted fields — confirm its actual format.
        console.warn(`⚠️ No version field matched in ${file}; file left unchanged`)
        return
      }
      fs.writeFileSync(filePath, updated)
      console.log(`📝 Updated version in ${file}`)
    } catch (error) {
      console.warn(`⚠️ Could not update ${file}:`, error.message)
    }
  })
}
// Run if called directly; the bump type comes from the first CLI argument
// (`node scripts/update-version.js minor`), defaulting to 'patch'.
if (require.main === module) {
const versionType = process.argv[2] || 'patch'
updateVersion(versionType)
}
module.exports = { updateVersion }
Database Utility Scripts
Migration Runner Script
File: scripts/db-migration-runner.js
Copy
const fs = require('fs')
const path = require('path')
const { createClient } = require('@supabase/supabase-js')
/**
 * Executes every .sql file in supabase/migrations (lexicographic order)
 * through the database's `exec_sql` RPC, stopping with exit code 1 on the
 * first failure.
 *
 * NOTE(review): there is no applied-migrations ledger, so every run
 * re-executes all files — migrations must be idempotent. Also assumes an
 * `exec_sql` RPC function exists in the database; confirm it is defined.
 */
async function runMigrations() {
  // Fail fast with a clear message instead of letting createClient choke
  // on undefined credentials.
  const url = process.env.VITE_SUPABASE_URL
  const serviceKey = process.env.VITE_SUPABASE_SERVICE_ROLE_KEY
  if (!url || !serviceKey) {
    console.error('❌ VITE_SUPABASE_URL and VITE_SUPABASE_SERVICE_ROLE_KEY must be set')
    process.exit(1)
  }
  const supabase = createClient(url, serviceKey)
  const migrationsDir = path.join(__dirname, '../supabase/migrations')
  // readdirSync on a missing directory would otherwise surface as an
  // unhandled ENOENT rejection.
  if (!fs.existsSync(migrationsDir)) {
    console.error(`❌ Migrations directory not found: ${migrationsDir}`)
    process.exit(1)
  }
  const migrationFiles = fs.readdirSync(migrationsDir)
    .filter(file => file.endsWith('.sql'))
    .sort()
  console.log(`🔄 Found ${migrationFiles.length} migration files`)
  for (const file of migrationFiles) {
    try {
      console.log(`📝 Running migration: ${file}`)
      const migrationPath = path.join(migrationsDir, file)
      const sql = fs.readFileSync(migrationPath, 'utf8')
      const { error } = await supabase.rpc('exec_sql', { sql })
      if (error) {
        console.error(`❌ Migration failed: ${file}`, error)
        process.exit(1)
      }
      console.log(`✅ Migration completed: ${file}`)
    } catch (error) {
      console.error(`❌ Error running migration ${file}:`, error)
      process.exit(1)
    }
  }
  console.log('🎉 All migrations completed successfully')
}
// Run if called directly. The .catch surfaces failures that escape
// runMigrations's per-file try/catch (e.g. fs.readdirSync throwing) —
// previously such errors became unhandled promise rejections.
if (require.main === module) {
  runMigrations().catch(error => {
    console.error('❌ Migration run failed:', error)
    process.exit(1)
  })
}
module.exports = { runMigrations }
Backup Generator Script
File: scripts/backup-generator.js
Copy
const fs = require('fs')
const path = require('path')
const { createClient } = require('@supabase/supabase-js')
/**
 * Dumps every public-schema table to a timestamped JSON file under ../backups,
 * then prunes backups older than 7 days. Exits with code 1 on failure.
 */
async function generateBackup() {
  const supabase = createClient(
    process.env.VITE_SUPABASE_URL,
    process.env.VITE_SUPABASE_SERVICE_ROLE_KEY
  )
  // Colons/dots are invalid in some filesystems' filenames.
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-')
  const backupDir = path.join(__dirname, '../backups')
  if (!fs.existsSync(backupDir)) {
    fs.mkdirSync(backupDir, { recursive: true })
  }
  try {
    // NOTE(review): PostgREST does not normally expose information_schema via
    // .from() — confirm this query actually returns rows in this deployment.
    const { data: tables, error: tablesError } = await supabase
      .from('information_schema.tables')
      .select('table_name')
      .eq('table_schema', 'public')
    // Bug fix: on query failure `tables` is null and the old code crashed with
    // "tables is not iterable", hiding the real error.
    if (tablesError || !tables) {
      throw new Error(`Could not list tables: ${tablesError?.message ?? 'no data returned'}`)
    }
    const backup = {
      timestamp,
      version: require('../package.json').version,
      tables: {}
    }
    // Dump each table; individual failures are warnings, not fatal.
    for (const table of tables) {
      console.log(`📦 Backing up table: ${table.table_name}`)
      const { data, error } = await supabase
        .from(table.table_name)
        .select('*')
      if (error) {
        console.warn(`⚠️ Warning: Could not backup ${table.table_name}:`, error.message)
        continue
      }
      backup.tables[table.table_name] = data
    }
    const backupFile = path.join(backupDir, `backup-${timestamp}.json`)
    fs.writeFileSync(backupFile, JSON.stringify(backup, null, 2))
    console.log(`✅ Backup generated: ${backupFile}`)
    // Keep only the last 7 days of backups.
    cleanupOldBackups(backupDir)
  } catch (error) {
    console.error('❌ Error generating backup:', error)
    process.exit(1)
  }
}
/**
 * Deletes backup-*.json files in backupDir whose mtime is older than 7 days.
 *
 * @param {string} backupDir - directory containing backup JSON files
 */
function cleanupOldBackups(backupDir) {
  const cutoff = new Date()
  cutoff.setDate(cutoff.getDate() - 7)
  const candidates = fs.readdirSync(backupDir)
    .filter((name) => name.startsWith('backup-') && name.endsWith('.json'))
    .map((name) => {
      const fullPath = path.join(backupDir, name)
      return { name, path: fullPath, time: fs.statSync(fullPath).mtime }
    })
    .sort((a, b) => b.time - a.time)
  for (const stale of candidates) {
    if (stale.time >= cutoff) continue
    fs.unlinkSync(stale.path)
    console.log(`🗑️ Deleted old backup: ${stale.name}`)
  }
}
// Run if called directly. The .catch surfaces failures that occur before
// generateBackup's try block (e.g. createClient throwing on undefined env
// vars) — previously those became unhandled promise rejections.
if (require.main === module) {
  generateBackup().catch(error => {
    console.error('❌ Backup run failed:', error)
    process.exit(1)
  })
}
module.exports = { generateBackup }
Asset Optimization Scripts
Asset Optimizer Script
File: scripts/asset-optimizer.js
Copy
const fs = require('fs')
const path = require('path')
const sharp = require('sharp')
/**
 * Converts jpg/jpeg/png images under public/assets into resized WebP copies
 * (max 1200x1200, quality 80), written alongside the originals with an
 * "-optimized.webp" suffix. Per-file failures are warnings, not fatal.
 */
async function optimizeAssets() {
  const publicDir = path.join(__dirname, '../public')
  const assetsDir = path.join(publicDir, 'assets')
  if (!fs.existsSync(assetsDir)) {
    console.log('📁 Assets directory not found, skipping optimization')
    return
  }
  // Only formats we actually convert. The old filter also matched .webp and
  // .gif, but the output-path rewrite only handled jpg|jpeg|png — for those
  // files optimizedPath equaled the input path, so sharp was asked to read
  // and write the same file and the conversion failed.
  const convertibleRe = /\.(jpg|jpeg|png)$/
  const files = fs.readdirSync(assetsDir, { recursive: true })
    .filter(file => convertibleRe.test(file) && !file.includes('-optimized'))
  console.log(`🖼️ Found ${files.length} images to optimize`)
  for (const file of files) {
    try {
      const filePath = path.join(assetsDir, file)
      const stats = fs.statSync(filePath)
      console.log(`🔄 Optimizing: ${file}`)
      const optimizedPath = filePath.replace(convertibleRe, '-optimized.webp')
      await sharp(filePath)
        .webp({ quality: 80 })
        .resize(1200, 1200, {
          fit: 'inside',
          withoutEnlargement: true
        })
        .toFile(optimizedPath)
      const newStats = fs.statSync(optimizedPath)
      const savings = ((stats.size - newStats.size) / stats.size * 100).toFixed(1)
      console.log(`✅ Optimized: ${file} (${savings}% size reduction)`)
    } catch (error) {
      console.warn(`⚠️ Could not optimize ${file}:`, error.message)
    }
  }
  console.log('🎉 Asset optimization completed')
}
// Run if called directly. NOTE(review): the returned promise is not
// .catch()-ed; a throw outside the per-file try/catch (e.g. readdirSync
// failing) would surface as an unhandled rejection — consider adding .catch().
if (require.main === module) {
optimizeAssets()
}
module.exports = { optimizeAssets }
Deployment Validation Scripts
Pre-deployment Check Script
File: scripts/deployment-check.js
Copy
const fs = require('fs')
const path = require('path')
/**
 * Runs every pre-deployment check in sequence, logging each result, then
 * terminates the process: exit 0 if all passed, exit 1 otherwise.
 * A check signals failure by throwing; its error message is logged.
 */
async function runDeploymentChecks() {
  console.log('🔍 Running pre-deployment checks...')
  const checks = [
    checkEnvironmentVariables,
    checkBuildFiles,
    checkTypeScript,
    checkLinting,
    checkDependencies
  ]
  let failures = 0
  for (const check of checks) {
    try {
      await check()
      console.log(`✅ ${check.name} passed`)
    } catch (error) {
      console.error(`❌ ${check.name} failed:`, error.message)
      failures += 1
    }
  }
  if (failures > 0) {
    console.log('💥 Deployment checks failed!')
    process.exit(1)
  }
  console.log('🎉 All deployment checks passed!')
  process.exit(0)
}
/**
 * Verifies that every environment variable required for a deployment build
 * is set (and non-empty).
 *
 * @throws {Error} listing all missing variable names
 */
async function checkEnvironmentVariables() {
  const requiredVars = [
    'VITE_SUPABASE_URL',
    'VITE_SUPABASE_ANON_KEY',
    'VITE_APP_URL'
  ]
  const missing = []
  for (const name of requiredVars) {
    if (!process.env[name]) {
      missing.push(name)
    }
  }
  if (missing.length > 0) {
    throw new Error(`Missing environment variables: ${missing.join(', ')}`)
  }
}
/**
 * Verifies that the production build output exists: the dist directory
 * itself plus its index.html and assets entries.
 *
 * @throws {Error} when the build directory or a required entry is missing
 */
async function checkBuildFiles() {
  const distDir = path.join(__dirname, '../dist')
  if (!fs.existsSync(distDir)) {
    throw new Error('Build directory not found. Run npm run build first.')
  }
  for (const entry of ['index.html', 'assets']) {
    if (!fs.existsSync(path.join(distDir, entry))) {
      throw new Error(`Required build file missing: ${entry}`)
    }
  }
}
/**
 * Runs the project's TypeScript type-check npm script.
 *
 * @throws {Error} when `npm run type-check` exits non-zero
 */
async function checkTypeScript() {
  const { promisify } = require('util')
  const execAsync = promisify(require('child_process').exec)
  try {
    await execAsync('npm run type-check')
  } catch {
    throw new Error('TypeScript type checking failed')
  }
}
/**
 * Runs the project's ESLint npm script.
 *
 * @throws {Error} when `npm run lint` exits non-zero
 */
async function checkLinting() {
  const { promisify } = require('util')
  const execAsync = promisify(require('child_process').exec)
  try {
    await execAsync('npm run lint')
  } catch {
    throw new Error('ESLint checks failed')
  }
}
/**
 * Verifies dependency hygiene: package.json and package-lock.json exist,
 * and `npm audit` reports no vulnerabilities at moderate severity or above.
 *
 * @throws {Error} on a missing manifest/lockfile or a failing audit
 */
async function checkDependencies() {
  const projectFile = (name) => path.join(__dirname, '..', name)
  if (!fs.existsSync(projectFile('package.json'))) {
    throw new Error('package.json not found')
  }
  if (!fs.existsSync(projectFile('package-lock.json'))) {
    throw new Error('package-lock.json not found')
  }
  // npm audit exits non-zero when vulnerabilities at or above the given
  // level are present.
  const { promisify } = require('util')
  const execAsync = promisify(require('child_process').exec)
  try {
    await execAsync('npm audit --audit-level moderate')
  } catch {
    throw new Error('Security vulnerabilities found in dependencies')
  }
}
// Run if called directly (CLI usage: `node scripts/deployment-check.js`).
// runDeploymentChecks awaits each check inside try/catch and always ends in
// process.exit(0|1), so no .catch() is needed on the returned promise.
if (require.main === module) {
runDeploymentChecks()
}
module.exports = { runDeploymentChecks }
Script Usage Examples
Package.json Scripts
Copy
{
"scripts": {
"generate-sitemap": "node scripts/generate-sitemap.js",
"generate-sitemap:simple": "node scripts/generate-sitemap-simple.js",
"update-version": "node scripts/update-version.js",
"run-migrations": "node scripts/db-migration-runner.js",
"optimize-assets": "node scripts/asset-optimizer.js",
"deployment-check": "node scripts/deployment-check.js",
"generate-backup": "node scripts/backup-generator.js"
}
}
Command Line Usage
Copy
# Generate comprehensive sitemap
npm run generate-sitemap
# Generate simple sitemap
npm run generate-sitemap:simple
# Update version (patch, minor, major)
npm run update-version patch
npm run update-version minor
npm run update-version major
# Run database migrations
npm run run-migrations
# Optimize assets
npm run optimize-assets
# Run deployment checks
npm run deployment-check
# Generate database backup
npm run generate-backup
Script Integration
CI/CD Integration
Copy
# .github/workflows/deploy.yml
name: Deploy
on:
push:
branches: [main]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: '18'
- run: npm install
- run: npm run deployment-check
- run: npm run build
- run: npm run generate-sitemap
- run: npm run optimize-assets
- name: Deploy to Vercel
uses: amondnet/vercel-action@v20
with:
vercel-token: ${{ secrets.VERCEL_TOKEN }}
vercel-org-id: ${{ secrets.ORG_ID }}
vercel-project-id: ${{ secrets.PROJECT_ID }}
Pre-commit Hooks
Copy
{
"husky": {
"hooks": {
"pre-commit": "npm run deployment-check && npm run generate-sitemap"
}
}
}
Error Handling
Script Error Handling
All scripts include comprehensive error handling:
- Graceful Degradation: Scripts continue running even if some operations fail
- Detailed Logging: Clear success and error messages
- Exit Codes: Proper exit codes for CI/CD integration
- Validation: Input validation and environment checks
Common Error Scenarios
- Missing Dependencies: Check if required packages are installed
- Environment Variables: Validate required environment variables
- File Permissions: Ensure proper file system permissions
- Network Issues: Handle API timeouts and connection errors
Related Documentation
- Build Scripts - Build and compilation scripts
- Package Configuration - Dependencies and scripts
- Environment Configuration - Environment variables
- Database Migrations - Database schema changes