diff --git a/migrations/0001_create_pageview_logs.sql b/migrations/0001_create_pageview_logs.sql new file mode 100644 index 0000000..314a6ce --- /dev/null +++ b/migrations/0001_create_pageview_logs.sql @@ -0,0 +1,13 @@ +-- Create pageview_logs table for detailed analytics +CREATE TABLE IF NOT EXISTS pageview_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + slug TEXT NOT NULL, + ip_address TEXT, + user_agent TEXT, + referrer TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP +); + +-- Create index on slug and created_at for efficient querying +CREATE INDEX IF NOT EXISTS idx_pageview_logs_slug ON pageview_logs(slug); +CREATE INDEX IF NOT EXISTS idx_pageview_logs_created_at ON pageview_logs(created_at); diff --git a/package.json b/package.json index 74ae779..a9d7770 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "preview": "astro preview", "astro": "astro", "deploy": "wrangler pages deploy ./dist", - "deploy:db": "node -e \"require('dotenv').config()\" && wrangler d1 execute blog-pageviews --file=./migrations/0000_create_pageviews.sql --remote", + "deploy:db": "node scripts/deploy-db.cjs", "lint": "prettier --write \"**/*.{js,jsx,ts,tsx,md,mdx,svelte,astro}\" && eslint --fix \"src/**/*.{js,ts,jsx,tsx,svelte,astro}\"", "sort": "node scripts/updateCategoryBatchly.cjs" }, diff --git a/scripts/deploy-db.cjs b/scripts/deploy-db.cjs new file mode 100644 index 0000000..db400d0 --- /dev/null +++ b/scripts/deploy-db.cjs @@ -0,0 +1,34 @@ +require('dotenv').config(); +const { execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); + +// Check for required environment variable +if (!process.env.CLOUDFLARE_API_TOKEN) { + console.error('❌ CLOUDFLARE_API_TOKEN environment variable is required'); + process.exit(1); +} + +// Get all SQL files from migrations directory +const migrationsDir = path.join(__dirname, '..', 'migrations'); +const migrations = fs.readdirSync(migrationsDir) + .filter(file => file.endsWith('.sql')) + 
.sort(); // Sort to ensure consistent order + +console.log('Found migrations:', migrations); + +// Execute each migration +migrations.forEach(migration => { + const migrationPath = path.join('migrations', migration); + console.log(`\nExecuting migration: ${migration}`); + + try { + execSync(`wrangler d1 execute blog-pageviews --file=./${migrationPath} --remote`, { + stdio: 'inherit' + }); + console.log(`✅ Successfully executed ${migration}`); + } catch (error) { + console.error(`❌ Failed to execute ${migration}`); + process.exit(1); + } +}); diff --git a/src/pages/api/pageview/[slug].ts b/src/pages/api/pageview/[slug].ts index 49e1116..5d1aa24 100644 --- a/src/pages/api/pageview/[slug].ts +++ b/src/pages/api/pageview/[slug].ts @@ -107,17 +107,27 @@ export const POST: APIRoute = async ({ params, locals, request }) => { } try { - // Insert or update view count - await db - .prepare( - `INSERT INTO pageviews (slug, views) - VALUES (?, 1) - ON CONFLICT(slug) - DO UPDATE SET views = views + 1, - updated_at = CURRENT_TIMESTAMP` - ) - .bind(slug) - .run(); + // Get request information + const ip_address = request.headers.get('cf-connecting-ip') || request.headers.get('x-forwarded-for') || 'unknown'; + const user_agent = request.headers.get('user-agent') || 'unknown'; + const referrer = request.headers.get('referer') || 'unknown'; + + // Start a transaction to ensure both operations succeed or fail together + const stmt1 = db.prepare( + `INSERT INTO pageviews (slug, views) + VALUES (?, 1) + ON CONFLICT(slug) + DO UPDATE SET views = views + 1, + updated_at = CURRENT_TIMESTAMP` + ).bind(slug); + + const stmt2 = db.prepare( + `INSERT INTO pageview_logs (slug, ip_address, user_agent, referrer) + VALUES (?, ?, ?, ?)` + ).bind(slug, ip_address, user_agent, referrer); + + // Execute both statements in a transaction + await db.batch([stmt1, stmt2]); // Get updated view count const { results } = await db diff --git a/wrangler.toml b/wrangler.toml index 0bbb9b3..c82b1cd 100644 --- 
a/wrangler.toml
+++ b/wrangler.toml
@@ -1,13 +1,17 @@
 name = "kazoottt-blog"
 compatibility_date = "2024-01-01"
 
+# Cloudflare Pages build output: pages_build_output_dir is a top-level key
+# ([site]/bucket is Workers Sites config and conflicts with `wrangler pages deploy`)
+pages_build_output_dir = "dist"
+
+# Keep the D1 binding: src/pages/api/pageview/[slug].ts reads env.DB (db.batch into pageview_logs)
 [[d1_databases]]
 binding = "DB"
 database_name = "blog-pageviews"
 database_id = "ab9e5f7d-e254-4e7d-bd85-5d944a622682"
 
 [build]
 command = "pnpm build"
 
 [build.upload]
 format = "directory"