feat: add page view log

KazooTTT committed 2024-11-23 21:29:56 +08:00
parent ff5a3a50bd
commit b040521893
5 changed files with 73 additions and 17 deletions


@@ -0,0 +1,13 @@
-- Create pageview_logs table for detailed analytics
CREATE TABLE IF NOT EXISTS pageview_logs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
slug TEXT NOT NULL,
ip_address TEXT,
user_agent TEXT,
referrer TEXT,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
-- Create index on slug and created_at for efficient querying
CREATE INDEX IF NOT EXISTS idx_pageview_logs_slug ON pageview_logs(slug);
CREATE INDEX IF NOT EXISTS idx_pageview_logs_created_at ON pageview_logs(created_at);
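For context, these indexes match the per-slug, time-bounded queries an analytics view would run against this table. A minimal sketch in TypeScript (illustrative only, not part of this commit; D1Database is the ambient type from @cloudflare/workers-types):

// Illustrative only: counts one slug's views over the past week.
// SQLite can serve the slug filter from idx_pageview_logs_slug;
// idx_pageview_logs_created_at covers time-only range queries.
export async function recentViews(db: D1Database, slug: string): Promise<number> {
	const { results } = await db
		.prepare(
			`SELECT COUNT(*) AS views FROM pageview_logs
			 WHERE slug = ? AND created_at >= datetime('now', '-7 days')`
		)
		.bind(slug)
		.all<{ views: number }>();
	return results[0]?.views ?? 0;
}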

package.json

@@ -9,7 +9,7 @@
 	"preview": "astro preview",
 	"astro": "astro",
 	"deploy": "wrangler pages deploy ./dist",
-	"deploy:db": "node -e \"require('dotenv').config()\" && wrangler d1 execute blog-pageviews --file=./migrations/0000_create_pageviews.sql --remote",
+	"deploy:db": "node scripts/deploy-db.cjs",
 	"lint": "prettier --write \"**/*.{js,jsx,ts,tsx,md,mdx,svelte,astro}\" && eslint --fix \"src/**/*.{js,ts,jsx,tsx,svelte,astro}\"",
 	"sort": "node scripts/updateCategoryBatchly.cjs"
 },

scripts/deploy-db.cjs (new file)

@@ -0,0 +1,34 @@
require('dotenv').config();

const { execSync } = require('child_process');
const path = require('path');
const fs = require('fs');

// Check for required environment variable
if (!process.env.CLOUDFLARE_API_TOKEN) {
	console.error('❌ CLOUDFLARE_API_TOKEN environment variable is required');
	process.exit(1);
}

// Get all SQL files from the migrations directory
const migrationsDir = path.join(__dirname, '..', 'migrations');
const migrations = fs.readdirSync(migrationsDir)
	.filter(file => file.endsWith('.sql'))
	.sort(); // Sort to ensure consistent order

console.log('Found migrations:', migrations);

// Execute each migration
migrations.forEach(migration => {
	const migrationPath = path.join('migrations', migration);
	console.log(`\nExecuting migration: ${migration}`);
	try {
		execSync(`wrangler d1 execute blog-pageviews --file=./${migrationPath} --remote`, {
			stdio: 'inherit'
		});
		console.log(`✅ Successfully executed ${migration}`);
	} catch (error) {
		console.error(`❌ Failed to execute ${migration}`);
		process.exit(1);
	}
});
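As wired up in package.json above, the script runs via pnpm deploy:db: dotenv loads CLOUDFLARE_API_TOKEN from a local .env file, every .sql file under migrations/ is picked up in sorted filename order, and each is applied to the remote blog-pageviews database, stopping at the first failure.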


@@ -107,17 +107,27 @@ export const POST: APIRoute = async ({ params, locals, request }) => {
 	}
 	try {
-		// Insert or update view count
-		await db
-			.prepare(
-				`INSERT INTO pageviews (slug, views)
-				VALUES (?, 1)
-				ON CONFLICT(slug)
-				DO UPDATE SET views = views + 1,
-				updated_at = CURRENT_TIMESTAMP`
-			)
-			.bind(slug)
-			.run();
+		// Get request information
+		const ip_address = request.headers.get('cf-connecting-ip') || request.headers.get('x-forwarded-for') || 'unknown';
+		const user_agent = request.headers.get('user-agent') || 'unknown';
+		const referrer = request.headers.get('referer') || 'unknown';
+
+		// Start a transaction to ensure both operations succeed or fail together
+		const stmt1 = db.prepare(
+			`INSERT INTO pageviews (slug, views)
+			VALUES (?, 1)
+			ON CONFLICT(slug)
+			DO UPDATE SET views = views + 1,
+			updated_at = CURRENT_TIMESTAMP`
+		).bind(slug);
+		const stmt2 = db.prepare(
+			`INSERT INTO pageview_logs (slug, ip_address, user_agent, referrer)
+			VALUES (?, ?, ?, ?)`
+		).bind(slug, ip_address, user_agent, referrer);
+
+		// Execute both statements in a transaction
+		await db.batch([stmt1, stmt2]);
 		// Get updated view count
 		const { results } = await db
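D1's batch() runs the prepared statements as a single transaction, which is what makes the counter bump and the log insert succeed or fail together. Recording a view from the client is then a single POST; a minimal sketch (the /api/pageviews/[slug] path is an assumption, since the diff does not show where this route file lives):

// Hypothetical client helper; the route path is assumed, not shown in the diff.
async function recordPageView(slug: string): Promise<void> {
	await fetch(`/api/pageviews/${encodeURIComponent(slug)}`, { method: 'POST' });
}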

wrangler.toml

@@ -1,13 +1,12 @@
 name = "kazoottt-blog"
 compatibility_date = "2024-01-01"
+[[d1_databases]]
+binding = "DB"
+database_name = "blog-pageviews"
+database_id = "ab9e5f7d-e254-4e7d-bd85-5d944a622682"
 [build]
 command = "pnpm build"
 [build.upload]
 format = "directory"
-[site]
-bucket = "./dist"
+pages_build_output_dir = "dist"
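For reference, the binding = "DB" entry is what the API route's db handle points at. With the @astrojs/cloudflare adapter, bindings declared in wrangler.toml typically surface through the runtime locals; a minimal sketch (an assumption, since the diff does not show how db is obtained, and it presumes the adapter's Runtime types are wired up in env.d.ts):

import type { APIRoute } from 'astro';

// Sketch only: with @astrojs/cloudflare, wrangler.toml bindings are usually
// reached via locals.runtime.env; "DB" here matches binding = "DB" above.
// D1Database is the ambient type from @cloudflare/workers-types.
export const GET: APIRoute = async ({ locals }) => {
	const db = locals.runtime.env.DB as D1Database;
	const { results } = await db.prepare('SELECT slug, views FROM pageviews').all();
	return new Response(JSON.stringify(results), {
		headers: { 'content-type': 'application/json' }
	});
};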