Skip to content

Commit

Permalink
Merge pull request #34 from urbanriskmap/dev
Browse files Browse the repository at this point in the history
Merge dev into master
  • Loading branch information
tomasholderness authored Sep 27, 2017
2 parents 0698fb8 + c89552f commit a2e8cf6
Show file tree
Hide file tree
Showing 10 changed files with 156 additions and 40 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,9 @@ Flood affected area polygon data provided by emergency services via the REM inte
<dl>Chennai's municipal boundaries courtesy of Chennai Municipal Corporation</dl>
<dl>Chennai hydrological data (waterways) courtesy of Chennai Flood Management (http://chennaifloodmanagement.org/en/layers/geonode:watercourses#category-more)</dl>

**Broward County, Florida, US**
<dl>Broward County section grids courtesy of Broward County (http://gis.broward.org/GISData.htm)</dl>

### Dependencies
* [PostgreSQL](http://www.postgresql.org) version 9.5 or later, with
* [PostGIS](http://postgis.net) version 2.0 or later
Expand Down
69 changes: 44 additions & 25 deletions build/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ PGUSER=${PGUSER:-'postgres'}
COUNTRY=${COUNTRY:-'indonesia'}
PGDATABASE=${PGDATABASE:-'cognicity'}
PGPORT=${PGPORT:-'5432'}
PG_DEFAULT_DB=${PG_DEFAULT_DB:-'postgres'}

DATA=${DATA:-true} # optional load data flag (set false to skip data loading)
FUNCTIONS=${FUNCTIONS:-true} # optional load functions flag
Expand All @@ -13,72 +14,90 @@ SCHEMA=${SCHEMA:-true} #optional schema flag
if [ $SCHEMA == true ]; then
# Create Database
# Build command
BUILD="CREATE DATABASE \"$PGDATABASE\" WITH OWNER = \"$PGUSER\" ENCODING = 'UTF8' TABLESPACE = pg_default LC_COLLATE = 'en_US.UTF-8' LC_CTYPE = 'en_US.UTF-8' CONNECTION LIMIT = -1;"
echo "Create database"
BUILD="CREATE DATABASE \"$PGDATABASE\" WITH OWNER = \"$PGUSER\" ENCODING = 'UTF8' LC_COLLATE = 'en_US.UTF-8' LC_CTYPE = 'en_US.UTF-8' CONNECTION LIMIT = -1;"

psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c """$BUILD"""

POSTGIS="CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology; CREATE EXTENSION fuzzystrmatch; CREATE EXTENSION address_standardizer; CREATE EXTENSION postgis_tiger_geocoder;"
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PG_DEFAULT_DB -c """$BUILD"""

echo "Load PostGIS"
# Load postgis extensions
POSTGIS="CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology; CREATE EXTENSION fuzzystrmatch; CREATE EXTENSION address_standardizer; CREATE EXTENSION postgis_tiger_geocoder;"
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -c """$POSTGIS"""

echo "Load UUID"
# Load UUID extension
UUID="CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -c """$UUID"""

echo "Load schema"
# Load schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/cognicity/cognicity.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/cognicity/cognicity.schema.sql

echo "Load reports template schema"
# Load reports template schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/template/template.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/template/template.schema.sql

echo "Load twitter reports schema"
# Load the twitter (grasp version) reports schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/twitter/twitter.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/twitter/twitter.schema.sql

echo "Load grasp reports schema"
# Load the grasp reports schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/grasp/grasp.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/grasp/grasp.schema.sql

echo "Load detik reports schema"
# Load the detik reports schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/detik/detik.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/detik/detik.schema.sql

echo "Load qlue reports schema"
# Load the qlue reports schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/qlue/qlue.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/qlue/qlue.schema.sql

echo "Load zears report schema"
# Load the zears report schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/zears/zears.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/zears/zears.schema.sql

echo "Load floodgauge reports schema"
# Load the floodgauge reports schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/floodgauge/floodgauge.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/floodgauge/floodgauge.schema.sql

echo "Load infrastructure schema"
# Load the pumps, floodgates and waterways infrastructure schema data
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/infrastructure/infrastructure.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/infrastructure/infrastructure.schema.sql

echo "Load sensor schema"
# Load sensor schema
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/sensors/watersensor.schema.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/sensors/watersensor.schema.sql
fi

# Functions loading
if [ $FUNCTIONS == true ]; then
echo "Load functions"
# Load functions
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/cognicity/cognicity.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/template/template.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/grasp/grasp.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/detik/detik.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/qlue/qlue.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/zears/zears.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./schema/reports/floodgauge/floodgauge.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/cognicity/cognicity.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/template/template.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/grasp/grasp.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/detik/detik.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/qlue/qlue.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/zears/zears.functions.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/schema/reports/floodgauge/floodgauge.functions.sql
fi

# Static data loading
if [ $DATA == true ]; then
echo "Load data"
# Load instance data - regions
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./data/$COUNTRY/cognicity/instance_regions.data.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/data/$COUNTRY/cognicity/instance_regions.data.sql

# Load instance data - local areas
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./data/$COUNTRY/cognicity/local_areas.data.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/data/$COUNTRY/cognicity/local_areas.data.sql

# Load available infrastructure for selected country
for entry in ./data/$COUNTRY/infrastructure/*.sql
for entry in $(pwd)/data/$COUNTRY/infrastructure/*.sql
do
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $entry
done

# Initialize last seen tweet id
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f ./data/$COUNTRY/reports/twitter.data.sql
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f $(pwd)/data/$COUNTRY/reports/twitter.data.sql
fi
37 changes: 37 additions & 0 deletions build/upgrade.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
#!/usr/bin/env bash

# This script creates a new database with a fresh schema and copies data
# over from production, then swaps the databases by renaming.
# Run this from the root directory e.g. cognicity-schema/

# Abort on the first failing command, and make failures inside pipelines
# (e.g. pg_dump | psql) fatal, so we never rename databases after a bad copy.
set -e
set -o pipefail

# Define databases
PROD=cognicity
NEW=new_cognicity

# Set env vars consumed by build/run.sh
export PGDATABASE="$NEW"
export DATA=false # Disable default data loading in schema script
export PGHOST=localhost
export PG_DEFAULT_DB="$PROD" # Maintenance DB used when creating $NEW (prod is known to exist)
export PGUSER=postgres

# Create new database, empty schema
source build/run.sh

# Optional updates to old database
# Edit the old database - replace card id with UUID
## psql -d $PROD -h $PGHOST -U $PGUSER -f schema/reports/grasp/grasp.uuid_upgrade.sql

# Copy the old data (data only) to the new database, with triggers disabled during load
pg_dump -a "$PROD" -h "$PGHOST" -U "$PGUSER" --schema=cognicity --disable-triggers | psql -d "$NEW" -h "$PGHOST" -U "$PGUSER"

# Kill remaining connections to both databases so ALTER DATABASE ... RENAME can succeed
psql -d "$PROD" -h "$PGHOST" -U "$PGUSER" -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '$PROD' AND pid <> pg_backend_pid();"

psql -d "$NEW" -h "$PGHOST" -U "$PGUSER" -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '$NEW' AND pid <> pg_backend_pid();"

# Append prod with _old suffix
OLD="${PROD}_old"
psql -d "$NEW" -h "$PGHOST" -U "$PGUSER" -c "ALTER DATABASE \"$PROD\" RENAME TO \"$OLD\""

# Rename new to prod
psql -d "$OLD" -h "$PGHOST" -U "$PGUSER" -c "ALTER DATABASE \"$NEW\" RENAME TO \"$PROD\""
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "cognicity-schema",
"version": "3.0.2",
"version": "3.0.3",
"description": "Cognicity Database Schema",
"scripts": {
"build": "./build/run.sh",
Expand Down
10 changes: 10 additions & 0 deletions schema/cognicity/cognicity.functions.sql
Original file line number Diff line number Diff line change
Expand Up @@ -107,3 +107,13 @@ CREATE OR REPLACE FUNCTION cognicity.rem_get_flood(timestamp with time zone)

ALTER FUNCTION cognicity.rem_get_flood(timestamp with time zone)
OWNER TO postgres;

-- Returns the deployed cognicity schema version string.
-- Kept in sync with "version" in package.json (checked by test/testVersion.js).
-- OR REPLACE for consistency with cognicity.rem_get_flood above, and so the
-- function can be re-deployed in place on upgrades; IMMUTABLE because it
-- always returns the same constant.
CREATE OR REPLACE FUNCTION cognicity.version()
RETURNS varchar
AS $BODY$
BEGIN
RETURN '3.0.3';
END
$BODY$
LANGUAGE plpgsql IMMUTABLE
COST 100;
4 changes: 2 additions & 2 deletions schema/reports/grasp/grasp.schema.sql
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ CREATE schema grasp;
--Table grasp_card_id (card_id, received[true/false])
CREATE TABLE grasp.cards (
pkey bigserial NOT NULL,
card_id varchar NOT NULL UNIQUE,
card_id uuid NOT NULL UNIQUE DEFAULT uuid_generate_v4(),
username varchar NOT NULL,
network varchar NOT NULL,
language varchar NOT NULL,
Expand All @@ -15,7 +15,7 @@ CREATE TABLE grasp.cards (
--Table grasp_reports
CREATE TABLE grasp.reports (
pkey bigserial NOT NULL,
card_id varchar NOT NULL UNIQUE,
card_id uuid NOT NULL UNIQUE,
database_time timestamp with time zone DEFAULT now(),
created_at timestamp with time zone,
disaster_type varchar NOT NULL,
Expand Down
21 changes: 21 additions & 0 deletions schema/reports/grasp/grasp.uuid_upgrade.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
-- UPGRADE CARD_ID TO UUID TYPE FOR HISTORIC DATA
-- Replaces the varchar card_id columns in grasp.cards and grasp.reports
-- with UUID columns, preserving the card<->report linkage via the old ids.

-- Update grasp.cards
-- 1. Create UUID extension (provides uuid_generate_v4())
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- 2. Keep the old varchar ids around so reports can be re-linked in step 5
ALTER TABLE grasp.cards RENAME card_id to old_card_id;
-- 3. Create new column; every existing card receives a freshly generated UUID
ALTER TABLE grasp.cards ADD card_id UUID DEFAULT uuid_generate_v4();

-- Now update grasp.reports table
-- 4. Move old card ids aside (no DEFAULT here: values are copied from cards below)
ALTER TABLE grasp.reports RENAME card_id to old_card_id;
ALTER TABLE grasp.reports ADD card_id UUID;

-- 5. Re-link each report to its card's new UUID, joining on the old varchar id
UPDATE grasp.reports SET card_id = grasp.cards.card_id FROM grasp.cards WHERE grasp.reports.old_card_id = grasp.cards.old_card_id;

-- 6. Drop old columns now that the new UUID linkage is in place
ALTER TABLE grasp.cards DROP old_card_id;
ALTER TABLE grasp.reports DROP old_card_id;
2 changes: 2 additions & 0 deletions test/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import testQlue from './testQlue';
import testZears from './testZears';
import testFloodgauge from './testFloodgauge';
import testREM from './testREM';
import testVersion from './testVersion';

let instances = {
"indonesia": {
Expand Down Expand Up @@ -76,3 +77,4 @@ testQlue(db, instance)
testZears(db, instance)
testFloodgauge(db, instance)
testREM(db)
testVersion(db, process.env.npm_package_version)
28 changes: 16 additions & 12 deletions test/testGRASP.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,26 +7,30 @@ export default (db, instance) => {
let card_pkey; // Global card pkey object as created by database
let report_fkey; // Global report foreign key object as created by database in reports table
let report_pkey; // Global report pkey object as created by database
let card_id; //Global card id object created by database

before ('Insert dummy GRASP data', (done) => {

// Insert test data
let query = "INSERT INTO grasp.cards (card_id, username, network, language, received) VALUES ('abcdefg', 'user', 'test network', 'en', True) RETURNING pkey";
let query = "INSERT INTO grasp.cards (username, network, language, received) VALUES ('user', 'test network', 'en', True) RETURNING pkey, card_id";

db.oneOrNone(query)
.then((data) => card_pkey = data.pkey)
.catch((error) => console.log(error));
.then((data) => {
card_pkey = data.pkey;
card_id = data.card_id;

// Insert test data
query = "INSERT INTO grasp.reports (card_id, created_at, disaster_type, text, card_data, image_url, status, the_geom) VALUES ('abcdefg', now(), 'flood', 'report text', $1, 'no_url', 'confirmed', ST_GeomFromText('POINT($2 $3)', 4326)) RETURNING pkey";
// Insert test data
query = "INSERT INTO grasp.reports (card_id, created_at, disaster_type, text, card_data, image_url, status, the_geom) VALUES ($4, now(), 'flood', 'report text', $1, 'no_url', 'confirmed', ST_GeomFromText('POINT($2 $3)', 4326)) RETURNING pkey";

let values = [ instance.test_card_data, instance.test_report_lon, instance.test_report_lat ];
let values = [ instance.test_card_data, instance.test_report_lon, instance.test_report_lat, card_id ];

db.oneOrNone(query, values)
.then((data) => {
report_fkey = data.pkey;
done();
})
db.oneOrNone(query, values)
.then((data) => {
report_fkey = data.pkey;
done();
})
.catch((error) => console.log(error));
})
.catch((error) => console.log(error));
});

Expand All @@ -41,7 +45,7 @@ export default (db, instance) => {
test.value(data[0].tags.disaster_type).is(instance.test_disaster_type);
test.value(data[0].text).is(instance.test_report_text);
test.value(data[0].lang).is(instance.test_report_lang);
test.value(data[0].url).is(instance.test_card_url);
test.value(data[0].url).is(card_id);
test.value(data[0].report_data.water_depth).is(instance.test_card_data.water_depth);

report_pkey = data[0].pkey;
Expand Down
20 changes: 20 additions & 0 deletions test/testVersion.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import test from 'unit.js'; // ES import for consistency with test/index.js

/**
 * Schema version tests.
 * @param {Object} db - pg-promise database instance
 * @param {string} version - expected schema version (npm_package_version)
 */
export default (db, version) => {
  // Schema version function
  describe('Schema version functionality', () => {

    // Test
    it ('Can get correct version', (done) => {

      // Check cognicity.version() reports the expected schema version
      const query = "SELECT * FROM cognicity.version()";
      db.oneOrNone(query)
        .then((data) => {
          test.value(data.version).is(version);
          done();
        })
        .catch((error) => test.fail(error));
    });
  });
}

0 comments on commit a2e8cf6

Please sign in to comment.