feat(location): add comprehensive geographic hierarchy (continents, countries, states, cities)
- Add migration 021_location_hierarchy.sql with new table structure - Add location-loader.js seeder to import SQL dumps - Update all seeder files to use country_id instead of region_id - Rename companies.region_id to country_id
This commit is contained in:
parent
bc7b8f53f4
commit
1018da8036
7 changed files with 379 additions and 36 deletions
158
backend/migrations/021_location_hierarchy.sql
Normal file
158
backend/migrations/021_location_hierarchy.sql
Normal file
|
|
@ -0,0 +1,158 @@
|
||||||
|
-- Migration: 021_location_hierarchy.sql
-- Description: Restructure location tables to use comprehensive geographic hierarchy
-- Data Source: GeoDB Cities (https://github.com/dr5hn/countries-states-cities-database)

-- ============================================================================
-- PHASE 1: Backup existing tables
-- ============================================================================

-- Rename old tables to preserve data
ALTER TABLE IF EXISTS regions RENAME TO regions_old;
ALTER TABLE IF EXISTS cities RENAME TO cities_old;

-- Drop old indexes (they will conflict)
DROP INDEX IF EXISTS idx_regions_country;
DROP INDEX IF EXISTS idx_cities_region;

-- ============================================================================
-- PHASE 2: Create new location hierarchy
-- ============================================================================
-- All CREATE TABLE / CREATE INDEX statements use IF NOT EXISTS so this phase
-- can be re-applied safely if a previous run was interrupted part-way.

-- 2.1 Continents (formerly "regions" in GeoDB)
CREATE TABLE IF NOT EXISTS continents (
    id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
    name VARCHAR(100) NOT NULL,
    translations TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    flag SMALLINT DEFAULT 1 NOT NULL,
    wiki_data_id VARCHAR(255)
);

COMMENT ON TABLE continents IS 'Geographic continents (Africa, Americas, Asia, Europe, Oceania, Polar)';

-- 2.2 Subregions (e.g., Northern Africa, South America)
CREATE TABLE IF NOT EXISTS subregions (
    id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
    name VARCHAR(100) NOT NULL,
    continent_id BIGINT NOT NULL REFERENCES continents(id),
    translations TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    flag SMALLINT DEFAULT 1 NOT NULL,
    wiki_data_id VARCHAR(255)
);

CREATE INDEX IF NOT EXISTS idx_subregions_continent ON subregions(continent_id);
COMMENT ON TABLE subregions IS 'Geographic subregions within continents';

-- 2.3 Countries
CREATE TABLE IF NOT EXISTS countries (
    id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
    name VARCHAR(100) NOT NULL,
    iso2 CHAR(2),
    iso3 CHAR(3),
    numeric_code CHAR(3),
    phonecode VARCHAR(255),
    capital VARCHAR(255),
    currency VARCHAR(255),
    currency_name VARCHAR(255),
    currency_symbol VARCHAR(255),
    tld VARCHAR(255),
    native VARCHAR(255),
    continent_id BIGINT REFERENCES continents(id),
    subregion_id BIGINT REFERENCES subregions(id),
    nationality VARCHAR(255),
    latitude DECIMAL(10,8),
    longitude DECIMAL(11,8),
    emoji VARCHAR(10),
    emoji_u VARCHAR(50),
    timezones TEXT,
    translations TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    flag SMALLINT DEFAULT 1 NOT NULL,
    wiki_data_id VARCHAR(255)
);

CREATE INDEX IF NOT EXISTS idx_countries_iso2 ON countries(iso2);
CREATE INDEX IF NOT EXISTS idx_countries_iso3 ON countries(iso3);
CREATE INDEX IF NOT EXISTS idx_countries_continent ON countries(continent_id);
CREATE INDEX IF NOT EXISTS idx_countries_subregion ON countries(subregion_id);
COMMENT ON TABLE countries IS 'All countries with ISO codes, currencies, and metadata';

-- 2.4 States/Provinces
CREATE TABLE IF NOT EXISTS states (
    id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
    name VARCHAR(255) NOT NULL,
    country_id BIGINT NOT NULL REFERENCES countries(id),
    country_code CHAR(2) NOT NULL,
    iso2 VARCHAR(10),
    fips_code VARCHAR(255),
    type VARCHAR(191),
    latitude DECIMAL(10,8),
    longitude DECIMAL(11,8),
    timezone VARCHAR(255),
    translations TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    flag SMALLINT DEFAULT 1 NOT NULL,
    wiki_data_id VARCHAR(255)
);

CREATE INDEX IF NOT EXISTS idx_states_country ON states(country_id);
CREATE INDEX IF NOT EXISTS idx_states_country_code ON states(country_code);
CREATE INDEX IF NOT EXISTS idx_states_iso2 ON states(iso2);
COMMENT ON TABLE states IS 'States, provinces, and administrative regions';

-- 2.5 Cities
CREATE TABLE IF NOT EXISTS cities (
    id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
    name VARCHAR(255) NOT NULL,
    state_id BIGINT NOT NULL REFERENCES states(id),
    state_code VARCHAR(255),
    country_id BIGINT NOT NULL REFERENCES countries(id),
    country_code CHAR(2) NOT NULL,
    latitude DECIMAL(10,8) NOT NULL,
    longitude DECIMAL(11,8) NOT NULL,
    population BIGINT,
    timezone VARCHAR(255),
    translations TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    flag SMALLINT DEFAULT 1 NOT NULL,
    wiki_data_id VARCHAR(255)
);

CREATE INDEX IF NOT EXISTS idx_cities_state ON cities(state_id);
CREATE INDEX IF NOT EXISTS idx_cities_country ON cities(country_id);
CREATE INDEX IF NOT EXISTS idx_cities_country_code ON cities(country_code);
CREATE INDEX IF NOT EXISTS idx_cities_population ON cities(population);
COMMENT ON TABLE cities IS 'Cities with coordinates and population data';

-- ============================================================================
-- PHASE 3: Update companies table FKs
-- ============================================================================
-- NOTE(review): unlike Phase 2, this phase is NOT idempotent — after the
-- rename below, companies.region_id no longer exists and a re-run would fail
-- on the ALTER COLUMN. Acceptable only if the migration runner guarantees
-- single execution; confirm before re-applying manually.

-- Drop old FK constraint if exists (ignore errors)
ALTER TABLE companies DROP CONSTRAINT IF EXISTS companies_region_id_fkey;
ALTER TABLE companies DROP CONSTRAINT IF EXISTS companies_city_id_fkey;

-- Update column types to BIGINT (to match new tables)
ALTER TABLE companies ALTER COLUMN region_id TYPE BIGINT USING region_id::BIGINT;
ALTER TABLE companies ALTER COLUMN city_id TYPE BIGINT USING city_id::BIGINT;

-- Rename region_id to country_id for clarity
ALTER TABLE companies RENAME COLUMN region_id TO country_id;

-- Add new FK constraints (without REFERENCES for now - data will be populated by seeder)
-- These will be validated after seeder populates the data

-- Update indexes
DROP INDEX IF EXISTS idx_companies_region;
CREATE INDEX IF NOT EXISTS idx_companies_country ON companies(country_id);
CREATE INDEX IF NOT EXISTS idx_companies_city ON companies(city_id);

-- Add comments
COMMENT ON COLUMN companies.country_id IS 'Reference to countries table';
COMMENT ON COLUMN companies.city_id IS 'Reference to cities table';
@ -1,6 +1,5 @@
|
||||||
import { pool, testConnection, closePool } from './db.js';
|
import { pool, testConnection, closePool } from './db.js';
|
||||||
import { seedRegions } from './seeders/regions.js';
|
import { seedLocationData } from './seeders/location-loader.js';
|
||||||
import { seedCities } from './seeders/cities.js';
|
|
||||||
import { seedUsers } from './seeders/users.js';
|
import { seedUsers } from './seeders/users.js';
|
||||||
import { seedCompanies } from './seeders/companies.js';
|
import { seedCompanies } from './seeders/companies.js';
|
||||||
import { seedJobs } from './seeders/jobs.js';
|
import { seedJobs } from './seeders/jobs.js';
|
||||||
|
|
@ -64,8 +63,10 @@ async function seedDatabase() {
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
||||||
// Seed in order (respecting foreign key dependencies)
|
// Seed in order (respecting foreign key dependencies)
|
||||||
await seedRegions();
|
// 1. Location data first (continents -> subregions -> countries -> states -> cities)
|
||||||
await seedCities();
|
await seedLocationData();
|
||||||
|
|
||||||
|
// 2. Then companies (need countries)
|
||||||
await seedCompanies();
|
await seedCompanies();
|
||||||
await seedUsers();
|
await seedUsers();
|
||||||
await seedJobs();
|
await seedJobs();
|
||||||
|
|
@ -78,8 +79,7 @@ async function seedDatabase() {
|
||||||
|
|
||||||
console.log('\n✅ Database seeding completed successfully!');
|
console.log('\n✅ Database seeding completed successfully!');
|
||||||
console.log('\n📊 Summary:');
|
console.log('\n📊 Summary:');
|
||||||
console.log(' - Regions seeded');
|
console.log(' - 🌍 Location data (continents, subregions, countries, states, cities)');
|
||||||
console.log(' - Cities seeded');
|
|
||||||
console.log(' - 1 SuperAdmin');
|
console.log(' - 1 SuperAdmin');
|
||||||
console.log(' - 43 Companies (30 + 13 fictícias)');
|
console.log(' - 43 Companies (30 + 13 fictícias)');
|
||||||
console.log(' - 1129+ Jobs total');
|
console.log(' - 1129+ Jobs total');
|
||||||
|
|
|
||||||
|
|
@ -98,16 +98,16 @@ export async function seedAcmeCorp() {
|
||||||
console.log('🏭 Seeding ACME Corporation e 69 vagas hilariantes...');
|
console.log('🏭 Seeding ACME Corporation e 69 vagas hilariantes...');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Get or create a default region
|
// Get USA's country ID from new countries table
|
||||||
const regionsRes = await pool.query('SELECT id FROM regions LIMIT 1');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'US'");
|
||||||
const defaultRegionId = regionsRes.rows[0]?.id || null;
|
const usaId = countryResult.rows[0]?.id || null;
|
||||||
|
|
||||||
// 1. Create ACME Company
|
// 1. Create ACME Company
|
||||||
const acmeSlug = 'ACME Corporation';
|
const acmeSlug = 'ACME Corporation';
|
||||||
const acmeCNPJ = '99.999.999/0001-99';
|
const acmeCNPJ = '99.999.999/0001-99';
|
||||||
|
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active, logo_url)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active, logo_url)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
||||||
ON CONFLICT (slug) DO UPDATE SET
|
ON CONFLICT (slug) DO UPDATE SET
|
||||||
name = EXCLUDED.name,
|
name = EXCLUDED.name,
|
||||||
|
|
@ -120,7 +120,7 @@ export async function seedAcmeCorp() {
|
||||||
'company',
|
'company',
|
||||||
acmeCNPJ,
|
acmeCNPJ,
|
||||||
'Estrada do Deserto s/n, Monument Valley, Mojave, USA',
|
'Estrada do Deserto s/n, Monument Valley, Mojave, USA',
|
||||||
defaultRegionId,
|
usaId,
|
||||||
'+1-800-ACME-TNT',
|
'+1-800-ACME-TNT',
|
||||||
'careers@acme.corp',
|
'careers@acme.corp',
|
||||||
'https://acme.corp',
|
'https://acme.corp',
|
||||||
|
|
|
||||||
|
|
@ -54,11 +54,9 @@ function generateCNPJ(index) {
|
||||||
export async function seedCompanies() {
|
export async function seedCompanies() {
|
||||||
console.log('🏢 Seeding 30 companies...');
|
console.log('🏢 Seeding 30 companies...');
|
||||||
|
|
||||||
// Get region IDs
|
// Get Brazil's country ID from new countries table
|
||||||
const regions = await pool.query('SELECT id, code FROM regions');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'BR'");
|
||||||
const regMap = {};
|
const brazilId = countryResult.rows[0]?.id || null;
|
||||||
regions.rows.forEach(r => regMap[r.code] = r.id);
|
|
||||||
const defaultRegionId = regMap['13'] || (regions.rows.length > 0 ? regions.rows[0].id : null);
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
for (let i = 0; i < companyData.length; i++) {
|
for (let i = 0; i < companyData.length; i++) {
|
||||||
|
|
@ -67,7 +65,7 @@ export async function seedCompanies() {
|
||||||
const slug = generateSlug(company.name);
|
const slug = generateSlug(company.name);
|
||||||
|
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
ON CONFLICT (slug) DO UPDATE SET
|
ON CONFLICT (slug) DO UPDATE SET
|
||||||
name = EXCLUDED.name,
|
name = EXCLUDED.name,
|
||||||
|
|
@ -80,7 +78,7 @@ export async function seedCompanies() {
|
||||||
'company',
|
'company',
|
||||||
generateCNPJ(i),
|
generateCNPJ(i),
|
||||||
city,
|
city,
|
||||||
defaultRegionId,
|
brazilId,
|
||||||
`+55-11-${3000 + i}-${String(i).padStart(4, '0')}`,
|
`+55-11-${3000 + i}-${String(i).padStart(4, '0')}`,
|
||||||
`careers@${slug}.com`,
|
`careers@${slug}.com`,
|
||||||
`https://${slug}.com`,
|
`https://${slug}.com`,
|
||||||
|
|
@ -88,6 +86,7 @@ export async function seedCompanies() {
|
||||||
true,
|
true,
|
||||||
true
|
true
|
||||||
]);
|
]);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Seed System Company for SuperAdmin
|
// Seed System Company for SuperAdmin
|
||||||
|
|
|
||||||
|
|
@ -260,12 +260,12 @@ const sprawlMartJobs = [
|
||||||
|
|
||||||
async function createCompanyAndJobs(companyData, jobs) {
|
async function createCompanyAndJobs(companyData, jobs) {
|
||||||
try {
|
try {
|
||||||
const regionsRes = await pool.query('SELECT id FROM regions LIMIT 1');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'US'");
|
||||||
const defaultRegionId = regionsRes.rows[0]?.id || null;
|
const usaId = countryResult.rows[0]?.id || null;
|
||||||
|
|
||||||
// Create Company (companies uses SERIAL id)
|
// Create Company (companies uses SERIAL id)
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
||||||
`, [
|
`, [
|
||||||
|
|
@ -274,7 +274,7 @@ async function createCompanyAndJobs(companyData, jobs) {
|
||||||
'company',
|
'company',
|
||||||
companyData.cnpj,
|
companyData.cnpj,
|
||||||
companyData.address,
|
companyData.address,
|
||||||
defaultRegionId,
|
usaId,
|
||||||
companyData.phone,
|
companyData.phone,
|
||||||
companyData.email,
|
companyData.email,
|
||||||
companyData.website,
|
companyData.website,
|
||||||
|
|
|
||||||
|
|
@ -147,12 +147,12 @@ export async function seedStarkIndustries() {
|
||||||
console.log('🦾 Seeding Stark Industries (Marvel)...');
|
console.log('🦾 Seeding Stark Industries (Marvel)...');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const regionsRes = await pool.query('SELECT id FROM regions LIMIT 1');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'US'");
|
||||||
const defaultRegionId = regionsRes.rows[0]?.id || null;
|
const usaId = countryResult.rows[0]?.id || null;
|
||||||
|
|
||||||
// Create Company
|
// Create Company
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
||||||
`, [
|
`, [
|
||||||
|
|
@ -161,7 +161,7 @@ export async function seedStarkIndustries() {
|
||||||
'company',
|
'company',
|
||||||
'77.777.777/0001-77',
|
'77.777.777/0001-77',
|
||||||
'Stark Tower, 200 Park Avenue, Manhattan, New York, NY 10166',
|
'Stark Tower, 200 Park Avenue, Manhattan, New York, NY 10166',
|
||||||
defaultRegionId,
|
usaId,
|
||||||
'+1-212-STARK-01',
|
'+1-212-STARK-01',
|
||||||
'careers@starkindustries.com',
|
'careers@starkindustries.com',
|
||||||
'https://starkindustries.com',
|
'https://starkindustries.com',
|
||||||
|
|
@ -226,12 +226,12 @@ export async function seedLosPollosHermanos() {
|
||||||
console.log('🐔 Seeding Los Pollos Hermanos (Breaking Bad)...');
|
console.log('🐔 Seeding Los Pollos Hermanos (Breaking Bad)...');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const regionsRes = await pool.query('SELECT id FROM regions LIMIT 1');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'US'");
|
||||||
const defaultRegionId = regionsRes.rows[0]?.id || null;
|
const usaId = countryResult.rows[0]?.id || null;
|
||||||
|
|
||||||
// Create Company
|
// Create Company
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
||||||
`, [
|
`, [
|
||||||
|
|
@ -240,7 +240,7 @@ export async function seedLosPollosHermanos() {
|
||||||
'company',
|
'company',
|
||||||
'66.666.666/0001-66',
|
'66.666.666/0001-66',
|
||||||
'308 Negra Arroyo Lane, Albuquerque, NM 87104',
|
'308 Negra Arroyo Lane, Albuquerque, NM 87104',
|
||||||
defaultRegionId,
|
usaId,
|
||||||
'+1-505-POLLOS',
|
'+1-505-POLLOS',
|
||||||
'careers@lospollos.com',
|
'careers@lospollos.com',
|
||||||
'https://lospollos.com',
|
'https://lospollos.com',
|
||||||
|
|
@ -311,12 +311,12 @@ export async function seedSpringfieldNuclear() {
|
||||||
console.log('☢️ Seeding Springfield Nuclear Power Plant (Simpsons)...');
|
console.log('☢️ Seeding Springfield Nuclear Power Plant (Simpsons)...');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const regionsRes = await pool.query('SELECT id FROM regions LIMIT 1');
|
const countryResult = await pool.query("SELECT id FROM countries WHERE iso2 = 'US'");
|
||||||
const defaultRegionId = regionsRes.rows[0]?.id || null;
|
const usaId = countryResult.rows[0]?.id || null;
|
||||||
|
|
||||||
// Create Company
|
// Create Company
|
||||||
await pool.query(`
|
await pool.query(`
|
||||||
INSERT INTO companies (name, slug, type, document, address, region_id, phone, email, website, description, verified, active)
|
INSERT INTO companies (name, slug, type, document, address, country_id, phone, email, website, description, verified, active)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
|
||||||
`, [
|
`, [
|
||||||
|
|
@ -325,7 +325,7 @@ export async function seedSpringfieldNuclear() {
|
||||||
'company',
|
'company',
|
||||||
'88.888.888/0001-88',
|
'88.888.888/0001-88',
|
||||||
'100 Industrial Way, Springfield, State Unknown',
|
'100 Industrial Way, Springfield, State Unknown',
|
||||||
defaultRegionId,
|
usaId,
|
||||||
'+1-555-BURNS',
|
'+1-555-BURNS',
|
||||||
'careers@snpp.com',
|
'careers@snpp.com',
|
||||||
'https://snpp.com',
|
'https://snpp.com',
|
||||||
|
|
|
||||||
186
seeder-api/src/seeders/location-loader.js
Normal file
186
seeder-api/src/seeders/location-loader.js
Normal file
|
|
@ -0,0 +1,186 @@
|
||||||
|
import { pool } from '../db.js';
|
||||||
|
import { readFileSync, createReadStream } from 'fs';
|
||||||
|
import { createGunzip } from 'zlib';
|
||||||
|
import { pipeline } from 'stream/promises';
|
||||||
|
import { Writable } from 'stream';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||||
|
const SQL_DIR = join(__dirname, '..', '..', 'sql');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a SQL file directly
|
||||||
|
*/
|
||||||
|
/**
 * Execute a plain-text SQL dump file, replaying only its INSERT statements.
 *
 * The dumps come from the GeoDB Cities project. Everything that is not row
 * data (SET / COMMENT / ALTER ... OWNER TO / setval noise and psql
 * \restrict directives) is stripped before execution, and the two
 * camelCase column names in the dumps are mapped to the snake_case columns
 * used by our schema.
 *
 * @param {string} filename - File name inside the SQL_DIR directory.
 * @param {string} tableName - Target table name (used for logging only).
 * @returns {Promise<number>} Number of INSERT statements executed.
 * @throws Re-throws any read or query error after logging it.
 */
async function executeSqlFile(filename, tableName) {
  const filePath = join(SQL_DIR, filename);
  // Fixed: the message used shell-style `$(...)` instead of a JS `${...}`
  // interpolation, so it printed the placeholder literally.
  console.log(`  📄 Loading ${filename}...`);

  try {
    let sql = readFileSync(filePath, 'utf8');

    // Strip pg_dump/psql directives that are not standalone queries.
    // SET / ALTER / COMMENT are anchored to line starts (`^...m`) so the
    // same keywords appearing inside INSERT row data are left untouched.
    sql = sql
      .replace(/\\restrict[^\n]*/g, '')
      .replace(/\\unrestrict[^\n]*/g, '')
      .replace(/SELECT pg_catalog\.setval[^;]*;/g, '')
      .replace(/^ALTER TABLE[^;]*OWNER TO[^;]*;/gm, '')
      .replace(/^COMMENT ON[^;]*;/gm, '')
      .replace(/^SET[^;]*;/gm, '')
      .replace(/SELECT[^;]*set_config[^;]*;/g, '');

    // Keep only the row data. NOTE(review): this regex assumes no literal
    // ';' appears inside a quoted value — true for the current upstream
    // dumps, but re-check if the data source ever changes.
    const insertStatements = sql.match(/INSERT INTO[^;]+;/g) || [];

    if (insertStatements.length === 0) {
      console.log(`  ⚠️  No INSERT statements found in ${filename}`);
      return 0;
    }

    // Execute each INSERT, converting MySQL backtick quoting and the two
    // camelCase column names to our PostgreSQL snake_case schema.
    for (const stmt of insertStatements) {
      const pgStmt = stmt
        .replace(/`/g, '"')
        .replace(/"emojiU"/g, 'emoji_u')
        .replace(/"wikiDataId"/g, 'wiki_data_id');
      await pool.query(pgStmt);
    }

    console.log(`  ✓ ${insertStatements.length} records inserted into ${tableName}`);
    return insertStatements.length;
  } catch (error) {
    console.error(`  ❌ Error loading ${filename}:`, error.message);
    throw error;
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a gzipped SQL file
|
||||||
|
*/
|
||||||
|
/**
 * Decompress and execute a gzipped SQL dump file (used for the large
 * cities dump), replaying only its INSERT statements.
 *
 * Mirrors executeSqlFile but streams the file through gunzip first and
 * logs progress periodically because the cities dump is ~160k rows.
 *
 * @param {string} filename - Gzipped file name inside SQL_DIR.
 * @param {string} tableName - Target table name (used for logging only).
 * @returns {Promise<number>} Number of INSERT statements executed.
 * @throws Re-throws any read, decompress, or query error after logging it.
 */
async function executeGzippedSqlFile(filename, tableName) {
  const filePath = join(SQL_DIR, filename);
  // Fixed: `$(unknown)` shell-style placeholder replaced with a real
  // template interpolation.
  console.log(`  📄 Loading ${filename} (gzipped)...`);

  try {
    // Stream-decompress into memory. Collect raw Buffers and decode once
    // at the end: calling chunk.toString() per chunk can split a
    // multi-byte UTF-8 character across chunk boundaries and corrupt
    // accented city names.
    const chunks = [];
    await pipeline(
      createReadStream(filePath),
      createGunzip(),
      new Writable({
        write(chunk, encoding, callback) {
          chunks.push(chunk);
          callback();
        },
      })
    );
    let sql = Buffer.concat(chunks).toString('utf8');

    // Strip pg_dump/psql directives. SET / ALTER / COMMENT are anchored to
    // line starts so the keywords inside INSERT row data survive.
    sql = sql
      .replace(/\\restrict[^\n]*/g, '')
      .replace(/\\unrestrict[^\n]*/g, '')
      .replace(/SELECT pg_catalog\.setval[^;]*;/g, '')
      .replace(/^ALTER TABLE[^;]*OWNER TO[^;]*;/gm, '')
      .replace(/^COMMENT ON[^;]*;/gm, '')
      .replace(/^SET[^;]*;/gm, '')
      .replace(/SELECT[^;]*set_config[^;]*;/g, '');

    // Keep only the row data (same caveat as executeSqlFile: assumes no
    // literal ';' inside quoted values).
    const insertStatements = sql.match(/INSERT INTO[^;]+;/g) || [];

    if (insertStatements.length === 0) {
      console.log(`  ⚠️  No INSERT statements found in ${filename}`);
      return 0;
    }

    console.log(`  📊 Found ${insertStatements.length} records to insert...`);

    // Statements run sequentially; BATCH_SIZE only controls how often a
    // progress line is printed.
    const BATCH_SIZE = 1000;
    for (let i = 0; i < insertStatements.length; i += BATCH_SIZE) {
      const batch = insertStatements.slice(i, i + BATCH_SIZE);
      for (const stmt of batch) {
        const pgStmt = stmt
          .replace(/`/g, '"')
          .replace(/"emojiU"/g, 'emoji_u')
          .replace(/"wikiDataId"/g, 'wiki_data_id');
        await pool.query(pgStmt);
      }
      if ((i + BATCH_SIZE) % 10000 === 0 || i + BATCH_SIZE >= insertStatements.length) {
        console.log(`    ... ${Math.min(i + BATCH_SIZE, insertStatements.length)} / ${insertStatements.length}`);
      }
    }

    console.log(`  ✓ ${insertStatements.length} records inserted into ${tableName}`);
    return insertStatements.length;
  } catch (error) {
    console.error(`  ❌ Error loading ${filename}:`, error.message);
    throw error;
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Seed all location data from SQL dumps
|
||||||
|
*/
|
||||||
|
/**
 * Seed all location data from the bundled GeoDB SQL dumps.
 *
 * Levels are loaded in FK order: continents -> subregions -> countries ->
 * states -> cities. The cities dump is gzipped and by far the largest
 * (~160k rows). Prints per-table row counts at the end as a sanity check.
 *
 * @returns {Promise<void>}
 * @throws Re-throws any loader/query failure after logging it.
 */
export async function seedLocationData() {
  console.log('🌍 Seeding comprehensive location data...');
  console.log('   Source: GeoDB Cities (https://github.com/dr5hn/countries-states-cities-database)\n');

  try {
    // One entry per hierarchy level, in dependency order. The gzipped flag
    // routes the large cities dump through the streaming loader.
    const steps = [
      { banner: '1️⃣ Seeding Continents...', file: 'regions.sql', table: 'continents', gzipped: false },
      { banner: '2️⃣ Seeding Subregions...', file: 'subregions.sql', table: 'subregions', gzipped: false },
      { banner: '3️⃣ Seeding Countries...', file: 'countries.sql', table: 'countries', gzipped: false },
      { banner: '4️⃣ Seeding States...', file: 'states.sql', table: 'states', gzipped: false },
      { banner: '5️⃣ Seeding Cities (this may take a while)...', file: 'cities.sql.gz', table: 'cities', gzipped: true },
    ];

    for (const { banner, file, table, gzipped } of steps) {
      console.log(banner);
      if (gzipped) {
        await executeGzippedSqlFile(file, table);
      } else {
        await executeSqlFile(file, table);
      }
    }

    console.log('\n   ✅ Location data seeding complete!');

    // Report final row counts in a single round trip.
    const counts = await pool.query(`
      SELECT
        (SELECT COUNT(*) FROM continents) as continents,
        (SELECT COUNT(*) FROM subregions) as subregions,
        (SELECT COUNT(*) FROM countries) as countries,
        (SELECT COUNT(*) FROM states) as states,
        (SELECT COUNT(*) FROM cities) as cities
    `);
    const c = counts.rows[0];
    console.log(`   📊 Totals: ${c.continents} continents, ${c.subregions} subregions, ${c.countries} countries, ${c.states} states, ${c.cities} cities`);
  } catch (error) {
    console.error('❌ Location seeding failed:', error.message);
    throw error;
  }
}
|
||||||
|
|
||||||
|
// For direct execution: `node src/seeders/location-loader.js`.
// Fixed: the original promise chain had no .catch, so a seeding failure
// became an unhandled rejection, and the pool was never closed on error.
if (process.argv[1] === fileURLToPath(import.meta.url)) {
  import('../db.js')
    .then(async ({ testConnection, closePool }) => {
      const connected = await testConnection();
      if (!connected) {
        console.error('Could not connect to database');
        process.exit(1);
      }
      try {
        await seedLocationData();
      } finally {
        // Always release the pool, even when seeding throws.
        await closePool();
      }
    })
    .catch((error) => {
      console.error('❌ Standalone location seeding failed:', error.message);
      process.exit(1);
    });
}
|
||||||
Loading…
Reference in a new issue