update 9/20/25
This commit is contained in:
227
functions/migrations/001_create_initial_schema.sql
Normal file
227
functions/migrations/001_create_initial_schema.sql
Normal file
@@ -0,0 +1,227 @@
|
||||
-- Initial migration to create all tables for the Auditly application
-- Migration: 001_create_initial_schema.sql
--
-- Schema conventions used throughout this file:
--   * entity ids are application-generated VARCHAR(255) strings;
--   * timestamp columns are BIGINT epoch *milliseconds*
--     (EXTRACT(epoch FROM NOW()) * 1000 — compatible with JS Date.now()).

-- Create users table: one row per authenticated account.
CREATE TABLE IF NOT EXISTS users (
    id VARCHAR(255) PRIMARY KEY,
    email VARCHAR(255) UNIQUE NOT NULL,        -- login identity; uniqueness enforced here
    display_name VARCHAR(255),
    email_verified BOOLEAN DEFAULT false,
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    last_login_at BIGINT,
    updated_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000 -- refreshed by the trigger at the end of this file
);
|
||||
|
||||
-- Create organizations table: one row per company/tenant.  Combines the
-- profile fields collected during onboarding, Stripe subscription state,
-- usage counters, and plan settings.
CREATE TABLE IF NOT EXISTS organizations (
    id VARCHAR(255) PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    owner_id VARCHAR(255) NOT NULL REFERENCES users(id),
    industry VARCHAR(255),
    size VARCHAR(255),
    description TEXT,
    mission TEXT,
    vision TEXT,
    -- FIX: VALUES is a reserved SQL keyword in PostgreSQL and cannot be used
    -- unquoted as a column name (the unquoted form is a syntax error).
    -- Double-quoting keeps the identifier as plain lowercase `values`;
    -- queries referencing this column must quote it the same way.
    "values" TEXT,
    founding_year VARCHAR(255),
    evolution TEXT,
    major_milestones TEXT,
    advantages TEXT,
    vulnerabilities TEXT,
    competitors TEXT,
    market_position TEXT,
    current_challenges TEXT,
    short_term_goals TEXT,
    long_term_goals TEXT,
    key_metrics TEXT,
    culture_description TEXT,
    work_environment TEXT,
    leadership_style TEXT,
    communication_style TEXT,
    additional_context TEXT,
    onboarding_completed BOOLEAN DEFAULT false,
    onboarding_data JSONB,
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    updated_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000,          -- refreshed by trigger

    -- Subscription fields
    subscription_status VARCHAR(50) DEFAULT 'trial',
    stripe_customer_id VARCHAR(255),
    stripe_subscription_id VARCHAR(255),
    current_period_start BIGINT,
    current_period_end BIGINT,
    trial_end BIGINT,

    -- Usage tracking
    employee_count INTEGER DEFAULT 0,
    reports_generated INTEGER DEFAULT 0,
    last_report_generation BIGINT,

    -- Settings
    allowed_employee_count INTEGER DEFAULT 50,
    features_enabled JSONB DEFAULT '{
        "aiReports": true,
        "chat": true,
        "analytics": true
    }'::jsonb
);
|
||||
|
||||
-- Create user_organizations junction table for multi-org support:
-- maps users to organizations with a per-org role.
CREATE TABLE IF NOT EXISTS user_organizations (
    user_id VARCHAR(255) REFERENCES users(id) ON DELETE CASCADE,
    organization_id VARCHAR(255) REFERENCES organizations(id) ON DELETE CASCADE,
    role VARCHAR(50) NOT NULL DEFAULT 'employee', -- owner, admin, employee
    joined_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    onboarding_completed BOOLEAN DEFAULT false,
    PRIMARY KEY (user_id, organization_id) -- at most one membership row per (user, org)
);
|
||||
|
||||
-- Create employees table: org-scoped roster entries.  A row may exist before
-- the person has a user account (invite flow), hence the nullable user_id.
CREATE TABLE IF NOT EXISTS employees (
    id VARCHAR(255) PRIMARY KEY,
    organization_id VARCHAR(255) NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    user_id VARCHAR(255) REFERENCES users(id) ON DELETE SET NULL, -- nullable for invite-only employees
    name VARCHAR(255) NOT NULL,
    email VARCHAR(255) NOT NULL,
    role VARCHAR(255) DEFAULT 'employee',
    department VARCHAR(255) DEFAULT 'General',
    status VARCHAR(50) DEFAULT 'invited', -- invited, active, inactive
    invite_code VARCHAR(255),
    joined_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000,
    updated_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- refreshed by trigger

    -- the same email may appear in several orgs, but only once per org
    UNIQUE(organization_id, email)
);
|
||||
|
||||
-- Create auth_tokens table: opaque session tokens mapped to users.
CREATE TABLE IF NOT EXISTS auth_tokens (
    token VARCHAR(255) PRIMARY KEY, -- the opaque token value itself
    user_id VARCHAR(255) NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    expires_at BIGINT NOT NULL,  -- epoch ms; no DB-side expiry enforcement visible here
    last_used_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000,
    is_active BOOLEAN DEFAULT true -- presumably a soft-revocation flag — TODO confirm in caller
);
|
||||
|
||||
-- Create otps table for email verification: at most one OTP row per email
-- address (email is the primary key).
CREATE TABLE IF NOT EXISTS otps (
    email VARCHAR(255) PRIMARY KEY,
    otp VARCHAR(6) NOT NULL,    -- 6-character one-time code
    expires_at BIGINT NOT NULL, -- epoch ms
    attempts INTEGER DEFAULT 0, -- presumably counts verification attempts — TODO confirm in caller
    invite_code VARCHAR(255),   -- looks like it ties the OTP to an invite signup — TODO confirm
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000
);
|
||||
|
||||
-- Create invites table: invitation codes that let employees join an org.
CREATE TABLE IF NOT EXISTS invites (
    code VARCHAR(255) PRIMARY KEY,
    organization_id VARCHAR(255) NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    email VARCHAR(255) NOT NULL,
    employee_data JSONB NOT NULL, -- Contains employee info like name, role, department
    status VARCHAR(50) DEFAULT 'pending', -- pending, consumed, expired
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    expires_at BIGINT NOT NULL,   -- epoch ms
    consumed_at BIGINT,
    consumed_by VARCHAR(255)      -- presumably the consuming user's id — TODO confirm against caller
);
|
||||
|
||||
-- Create submissions table: questionnaire answers, at most one per employee.
CREATE TABLE IF NOT EXISTS submissions (
    id SERIAL PRIMARY KEY, -- note: integer id, unlike the VARCHAR ids used elsewhere
    employee_id VARCHAR(255) NOT NULL REFERENCES employees(id) ON DELETE CASCADE,
    organization_id VARCHAR(255) NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    answers JSONB NOT NULL,
    status VARCHAR(50) DEFAULT 'completed',
    submission_type VARCHAR(50) DEFAULT 'regular', -- regular, invite
    invite_code VARCHAR(255),
    submitted_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000,

    UNIQUE(employee_id) -- One submission per employee
);
|
||||
|
||||
-- Create employee_reports table: one generated report per employee.
-- name/role/email are denormalised copies of the employees row.
CREATE TABLE IF NOT EXISTS employee_reports (
    id SERIAL PRIMARY KEY,
    employee_id VARCHAR(255) NOT NULL REFERENCES employees(id) ON DELETE CASCADE,
    organization_id VARCHAR(255) NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    employee_name VARCHAR(255) NOT NULL,
    role VARCHAR(255),
    email VARCHAR(255),
    summary TEXT,
    submission_id INTEGER REFERENCES submissions(id), -- the submission this report was built from
    company_context JSONB,

    -- Report sections
    role_and_output JSONB,
    insights JSONB,
    strengths TEXT[],
    weaknesses TEXT[],
    opportunities JSONB[],
    risks TEXT[],
    recommendations TEXT[],
    grading_overview JSONB,

    generated_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000,
    updated_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- refreshed by trigger

    UNIQUE(employee_id) -- One report per employee
);
|
||||
|
||||
-- Create company_reports table.  In this initial schema the whole report
-- lives in one JSONB blob; migration 002 drops and recreates this table
-- with typed columns.
CREATE TABLE IF NOT EXISTS company_reports (
    id SERIAL PRIMARY KEY,
    organization_id VARCHAR(255) NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    report_data JSONB NOT NULL, -- Contains the full company report structure
    executive_summary TEXT,
    generated_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000, -- epoch ms
    created_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000,
    updated_at BIGINT DEFAULT EXTRACT(epoch FROM NOW()) * 1000 -- refreshed by trigger
);
|
||||
|
||||
-- Create indexes for better performance
-- (foreign-key columns plus the most common lookup columns).

-- users / organizations
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_organizations_owner ON organizations(owner_id);

-- membership junction table, both directions
CREATE INDEX IF NOT EXISTS idx_user_organizations_user ON user_organizations(user_id);
CREATE INDEX IF NOT EXISTS idx_user_organizations_org ON user_organizations(organization_id);

-- employees: per-org listing, user linkage, and (org, email) lookup
CREATE INDEX IF NOT EXISTS idx_employees_org ON employees(organization_id);
CREATE INDEX IF NOT EXISTS idx_employees_user ON employees(user_id);
CREATE INDEX IF NOT EXISTS idx_employees_email ON employees(organization_id, email);

-- auth tokens by user and by expiry
CREATE INDEX IF NOT EXISTS idx_auth_tokens_user ON auth_tokens(user_id);
CREATE INDEX IF NOT EXISTS idx_auth_tokens_expires ON auth_tokens(expires_at);

-- invites by org, email, and status
CREATE INDEX IF NOT EXISTS idx_invites_org ON invites(organization_id);
CREATE INDEX IF NOT EXISTS idx_invites_email ON invites(email);
CREATE INDEX IF NOT EXISTS idx_invites_status ON invites(status);

-- submissions and reports by employee / organization
CREATE INDEX IF NOT EXISTS idx_submissions_employee ON submissions(employee_id);
CREATE INDEX IF NOT EXISTS idx_submissions_org ON submissions(organization_id);
CREATE INDEX IF NOT EXISTS idx_employee_reports_employee ON employee_reports(employee_id);
CREATE INDEX IF NOT EXISTS idx_employee_reports_org ON employee_reports(organization_id);
CREATE INDEX IF NOT EXISTS idx_company_reports_org ON company_reports(organization_id);
|
||||
|
||||
-- Trigger function shared by all *_updated_at triggers below: stamps
-- NEW.updated_at with the current time in epoch milliseconds, matching the
-- BIGINT convention used by the rest of this schema.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = EXTRACT(epoch FROM NOW()) * 1000;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql; -- unquoted language name; the quoted form ('plpgsql') is deprecated
|
||||
|
||||
-- Create triggers to automatically update updated_at on every row UPDATE.
-- Only the tables that actually have an updated_at column get one
-- (users, organizations, employees, employee_reports, company_reports).
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_organizations_updated_at BEFORE UPDATE ON organizations
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_employees_updated_at BEFORE UPDATE ON employees
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_employee_reports_updated_at BEFORE UPDATE ON employee_reports
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_company_reports_updated_at BEFORE UPDATE ON company_reports
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||
106
functions/migrations/002_fix_company_report.sql
Normal file
106
functions/migrations/002_fix_company_report.sql
Normal file
@@ -0,0 +1,106 @@
|
||||
-- Migration 002: replace the JSONB-blob company_reports table (from 001)
-- with a typed-column version.  The table is empty at this point, so the
-- drop loses no data.
DROP TABLE IF EXISTS company_reports; -- it's empty

-- Composite types used by the new company_reports table.  CREATE TYPE has
-- no IF NOT EXISTS clause, so the batch runs inside one anonymous block
-- with a duplicate_object handler to keep the migration re-runnable.
-- (The original wrapped this in a second, redundant outer DO block; a
-- single block has the same behavior.)
-- NOTE(review): if an earlier partial run created only some of these types,
-- the handler fires on the first duplicate and the remaining types are NOT
-- created; drop the partial set manually before re-running in that case.
DO $$
BEGIN
    CREATE TYPE department_breakdown_item AS (
        "department" VARCHAR,
        "count" int
    );
    CREATE TYPE company_report_overview AS (
        "total_employees" int,
        "department_breakdown" department_breakdown_item[],
        "submissionRate" double precision, -- camelCase kept as-is; renaming would break readers
        "last_updated" TIMESTAMP,
        "average_performance_score" double precision,
        "risk_level" VARCHAR
    );
    CREATE TYPE weaknesses AS (
        "title" VARCHAR,
        "description" TEXT
    );
    CREATE TYPE new_hire AS (
        "name" VARCHAR,
        "department" VARCHAR,
        "role" VARCHAR,
        "impact" TEXT
    );
    CREATE TYPE promotion AS (
        "name" VARCHAR,
        "from_role" VARCHAR,
        "to_role" VARCHAR,
        "impact" TEXT
    );
    CREATE TYPE departure AS (
        "name" VARCHAR,
        "department" VARCHAR,
        "reason" TEXT,
        "impact" TEXT
    );
    CREATE TYPE personnel_changes AS (
        "new_hires" new_hire[],
        "promotions" promotion[],
        "departures" departure[]
    );
    CREATE TYPE immediate_hiring_needs AS (
        "department" VARCHAR,
        "role" VARCHAR,
        "priority" VARCHAR,
        "reasoning" TEXT
    );
    CREATE TYPE forward_operating_plan AS (
        "title" VARCHAR,
        "details" TEXT
    );
    CREATE TYPE organizational_impact_summary_employee AS (
        "employee_name" VARCHAR,
        "impact" TEXT,
        "description" TEXT,
        "suggested_pay" double precision
    );
    CREATE TYPE organizational_impact_summary AS (
        "category" VARCHAR,
        "employees" organizational_impact_summary_employee[]
    );
    CREATE TYPE team_score AS (
        "employee_name" VARCHAR,
        "grade" VARCHAR,
        "reliability" double precision,
        "role_fit" double precision,
        "scalability" double precision,
        "output" double precision,
        "initiative" double precision
    );
    CREATE TYPE grading_breakdown AS (
        "department_name_short" VARCHAR,
        "department_name" VARCHAR,
        "lead" VARCHAR,
        "support" VARCHAR,
        "department_grade" VARCHAR,
        "executive_summary" TEXT,
        "team_scores" team_score[]
    );
EXCEPTION
    -- FIX: the original notice named only two of the thirteen types, which
    -- was misleading; the handler covers the whole batch.
    WHEN duplicate_object THEN
        RAISE NOTICE 'company report composite types already exist, skipping creation.';
END
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Recreate company_reports with one typed column per report section,
-- using the composite types declared above.
-- NOTE: unlike the rest of the schema, created_at here is a TIMESTAMP
-- rather than an epoch-millisecond BIGINT.
CREATE TABLE IF NOT EXISTS company_reports (
    "id" SERIAL PRIMARY KEY,
    "organization_id" VARCHAR NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
    "prompt_used" TEXT, -- presumably the generation prompt — TODO confirm in caller
    "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "overview" company_report_overview,
    "strengths" TEXT[],
    "weaknesses" weaknesses[],
    "personnel_changes" personnel_changes,
    "immediate_hiring_needs" immediate_hiring_needs[],
    "forward_operating_plan" forward_operating_plan[],
    "organizational_impact_summary" organizational_impact_summary[],
    "grading_breakdown" grading_breakdown[]
);
|
||||
131
functions/migrations/migrate.js
Normal file
131
functions/migrations/migrate.js
Normal file
@@ -0,0 +1,131 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { executeQuery, executeTransaction } = require('../database');
|
||||
|
||||
/**
 * Run all database migrations.
 *
 * Ensures a `migrations` bookkeeping table exists, then executes every
 * `*.sql` file in this directory (lexicographic order) that has not been
 * recorded yet.  Each migration and the row recording it run inside the
 * same transaction, so a failed migration is never marked as executed.
 *
 * @returns {Promise<void>}
 * @throws re-throws the first error encountered, after logging it
 */
async function runMigrations() {
  console.log('🚀 Starting database migrations...');

  try {
    // Create migrations table to track what's been run
    await executeQuery(`
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        filename VARCHAR(255) NOT NULL UNIQUE,
        executed_at BIGINT NOT NULL DEFAULT EXTRACT(epoch FROM NOW()) * 1000
      );
    `);

    // Get list of executed migrations
    const executedMigrations = await executeQuery(
      'SELECT filename FROM migrations ORDER BY executed_at'
    );
    const executedFiles = new Set(executedMigrations.map(m => m.filename));

    // Read migration files from this directory, in name order (001_, 002_, ...)
    const migrationsDir = path.join(__dirname);
    const migrationFiles = fs.readdirSync(migrationsDir)
      .filter(file => file.endsWith('.sql'))
      .sort();

    console.log(`📁 Found ${migrationFiles.length} migration files`);

    for (const file of migrationFiles) {
      if (executedFiles.has(file)) {
        console.log(`⏭️ Skipping ${file} (already executed)`);
        continue;
      }

      console.log(`🔄 Executing ${file}...`);

      const filePath = path.join(migrationsDir, file);
      const sql = fs.readFileSync(filePath, 'utf8');

      await executeTransaction(async (client) => {
        if (process.env.USE_NEON_SERVERLESS === 'true') {
          // The Neon serverless driver runs one statement per call, so the
          // script must be split.  FIX: a naive sql.split(';') cut
          // dollar-quoted bodies in half (e.g. the `DO $$ ... END $$;` and
          // `CREATE FUNCTION ... $$ ... $$;` blocks in these migrations,
          // which contain internal semicolons); use a quote-aware splitter.
          const statements = splitSqlStatements(sql);
          for (const statement of statements) {
            await client(statement);
          }
        } else {
          // node-postgres can execute a multi-statement script in one call.
          await client.query(sql);
        }

        // Record that this migration was executed — same transaction as the
        // migration itself, so failure rolls back both.
        if (process.env.USE_NEON_SERVERLESS === 'true') {
          await client('INSERT INTO migrations (filename) VALUES ($1)', [file]);
        } else {
          await client.query('INSERT INTO migrations (filename) VALUES ($1)', [file]);
        }
      });

      console.log(`✅ Completed ${file}`);
    }

    console.log('🎉 All migrations completed successfully!');

  } catch (error) {
    console.error('❌ Migration failed:', error);
    throw error;
  }
}

/**
 * Split a SQL script into its top-level statements.
 *
 * Walks the text character by character and only treats `;` as a statement
 * boundary when it is outside of:
 *   - line comments (`-- ...`),
 *   - block comments,
 *   - single-quoted string literals (including `''` escapes),
 *   - dollar-quoted strings (`$$...$$` or `$tag$...$tag$`).
 *
 * @param {string} sql - Full contents of a migration file.
 * @returns {string[]} Trimmed, non-empty statements without trailing semicolons.
 */
function splitSqlStatements(sql) {
  const statements = [];
  let current = '';
  let i = 0;

  while (i < sql.length) {
    const ch = sql[i];

    // Line comment: copy through end of line.
    if (ch === '-' && sql[i + 1] === '-') {
      const nl = sql.indexOf('\n', i);
      const stop = nl === -1 ? sql.length : nl;
      current += sql.slice(i, stop);
      i = stop;
      continue;
    }

    // Block comment: copy through the closing marker.
    if (ch === '/' && sql[i + 1] === '*') {
      const end = sql.indexOf('*/', i + 2);
      const stop = end === -1 ? sql.length : end + 2;
      current += sql.slice(i, stop);
      i = stop;
      continue;
    }

    // Single-quoted string; '' is an escaped quote, not a terminator.
    if (ch === "'") {
      let j = i + 1;
      while (j < sql.length) {
        if (sql[j] === "'") {
          if (sql[j + 1] === "'") {
            j += 2;
            continue;
          }
          j += 1;
          break;
        }
        j += 1;
      }
      current += sql.slice(i, j);
      i = j;
      continue;
    }

    // Dollar-quoted string: $$...$$ or $tag$...$tag$ (tag is identifier-like).
    if (ch === '$') {
      const m = /^\$\$|^\$[A-Za-z_][A-Za-z_0-9]*\$/.exec(sql.slice(i));
      if (m) {
        const tag = m[0];
        const end = sql.indexOf(tag, i + tag.length);
        const stop = end === -1 ? sql.length : end + tag.length;
        current += sql.slice(i, stop);
        i = stop;
        continue;
      }
    }

    // Top-level statement boundary.
    if (ch === ';') {
      if (current.trim()) {
        statements.push(current.trim());
      }
      current = '';
      i += 1;
      continue;
    }

    current += ch;
    i += 1;
  }

  if (current.trim()) {
    statements.push(current.trim());
  }
  return statements;
}
|
||||
|
||||
/**
 * Create a new, empty migration file in this directory.
 *
 * The file is named `YYYY-MM-DD_<name>.sql` and pre-filled with a small
 * header template.
 *
 * @param {string} name - Migration name, used in the filename and header.
 * @returns {string} The generated filename (not the full path).
 */
function createMigration(name) {
  // Date part of the ISO timestamp, e.g. "2025-09-20".  (The original also
  // ran a replace(/[:.]/g, '-') first, which was dead work once the time
  // portion was discarded.)
  const datePrefix = new Date().toISOString().slice(0, 10);
  const filename = `${datePrefix}_${name}.sql`;
  const filepath = path.join(__dirname, filename);

  // FIX: the original template contained `$(unknown)` — `$(...)` is shell
  // syntax, not a JS template placeholder, so it was written out literally.
  const template = `-- Migration: ${filename}
-- Description: ${name}

-- Add your SQL here

`;

  fs.writeFileSync(filepath, template);
  console.log(`📝 Created migration: ${filename}`);
  return filename;
}
|
||||
|
||||
// Run migrations if this file is executed directly
|
||||
if (require.main === module) {
|
||||
const command = process.argv[2];
|
||||
|
||||
if (command === 'create') {
|
||||
const name = process.argv[3];
|
||||
if (!name) {
|
||||
console.error('❌ Please provide a migration name: npm run db:migrate create migration_name');
|
||||
process.exit(1);
|
||||
}
|
||||
createMigration(name);
|
||||
} else {
|
||||
runMigrations()
|
||||
.then(() => {
|
||||
console.log('🏁 Migration process completed');
|
||||
process.exit(0);
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('💥 Migration process failed:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
runMigrations,
|
||||
createMigration
|
||||
};
|
||||
Reference in New Issue
Block a user