Skip to content

Commit

Permalink
Rock mbtiles and OSM ways export working with new build scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
kueda committed Apr 22, 2020
1 parent dde1d0b commit daa21e5
Show file tree
Hide file tree
Showing 7 changed files with 240 additions and 38 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ cd underfoot
# SQLite will not compile
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash
source ~/.profile
nvm use
nvm install

# Set up a python virtual environment
virtualenv venv -p python3 --no-site-packages
Expand Down
2 changes: 1 addition & 1 deletion elevation.js
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ const makeContours = ( swlon, swlat, nelon, nelat ) => {
var pgClient = new pg.Client( { database: "underfoot", user: "underfoot", password: "underfoot" } );
pgClient.connect( err => {
if ( err ) throw err;
pgClient.query('SELECT ST_Extent(ST_Transform(geom, 4326)) FROM units', [], ( err, result ) => {
pgClient.query('SELECT ST_Extent(ST_Transform(geom, 4326)) FROM rock_units', [], ( err, result ) => {
if ( err ) throw err;
matches = result.rows[0]['st_extent'].match(/BOX\(([0-9\-\.]+) ([0-9\-\.]+),([0-9\-\.]+) ([0-9\-\.]+)\)/)
const swlat = matches[2];
Expand Down
143 changes: 143 additions & 0 deletions osm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
import os
import psycopg2
import sys
from sources import util

DBNAME = "underfoot_ways"
DB_USER = "underfoot"
DB_PASSWORD = "underfoot"
TABLE_NAME = "underfoot_ways"

def make_ways(pbf_url, clean=False, bbox=None):
    r"""Make an MBTiles file of OSM ways data given an OSM PBF export URL.

    Parameters
    ----------
    pbf_url : str
        The URL of a PBF export of OSM data
    clean : bool
        Force a fresh download of the PBF and drop the existing ways database
        before importing
    bbox : dict
        Bounding box to import from the PBF export of the form
        {"top": 1, "bottom": 0, "left": 0, "right": 1}

    Returns
    -------
    str
        Path to the MBTiles file that was created
    """
    # Bail if no PBF URL
    if not pbf_url:
        raise ValueError("You must specify a PBF URL")
    filename = os.path.basename(pbf_url)
    # Download the PBF export unless a cached copy exists and we aren't
    # forcing a clean build
    if clean or not os.path.isfile(filename):
        util.call_cmd(["curl", "-o", filename, pbf_url], check=True)
    if clean:
        util.call_cmd(["dropdb", DBNAME], check=True)
    # Create the database with the osmosis pgsnapshot schema if it doesn't
    # exist yet; a failed connect is our existence check
    try:
        con = psycopg2.connect("dbname={}".format(DBNAME))
    except psycopg2.OperationalError:
        util.call_cmd(["createdb", DBNAME])
        util.call_cmd(["psql", "-d", DBNAME, "-c", "CREATE EXTENSION postgis; CREATE EXTENSION hstore;"])
        util.call_cmd(["psql", "-d", DBNAME, "-f", "/usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6.sql"])
        util.call_cmd(["psql", "-d", DBNAME, "-f", "/usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_bbox.sql"])
        util.call_cmd(["psql", "-d", DBNAME, "-f", "/usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_linestring.sql"])
        con = psycopg2.connect("dbname={}".format(DBNAME))
    # Check to see if the ways table exists and has rows; only run the slow
    # osmosis import when it's missing or empty
    cur1 = con.cursor()
    ways_table_missing = False
    try:
        cur1.execute("SELECT count(*) FROM ways")
        # Bail if it has rows unless we're forcing it
        row = cur1.fetchone()
        if row[0] > 0:
            print("ways table has rows. Use --clean to force a new import")
        else:
            ways_table_missing = True
    except psycopg2.errors.UndefinedTable:
        ways_table_missing = True
    con.close()
    if ways_table_missing:
        read_args = [
            "--read-pbf", filename,
            "--tf", "accept-ways", "highway=*",
            "--tf", "reject-ways", "service=*",
            "--tf", "reject-ways", "footway=sidewalk",
            "--tf", "reject-ways", "highway=proposed",
            "--tf", "reject-ways", "highway=footway",
            "--tf", "reject-ways", "highway=pedestrian",
            "--tf", "reject-ways", "highway=steps"
        ]
        # Restrict the import to a bounding box when one was given
        if bbox:
            read_args += [
                "--bounding-box",
                "top={}".format(bbox["top"]),
                "left={}".format(bbox["left"]),
                "bottom={}".format(bbox["bottom"]),
                "right={}".format(bbox["right"])
            ]
        write_args = [
            "--write-pgsql",
            "database={}".format(DBNAME),
            "user={}".format(DB_USER),
            "password={}".format(DB_PASSWORD)
        ]
        # Load data from PBF into the database with osmosis
        util.call_cmd(["osmosis"] + read_args + write_args)
    # Rebuild a simplified ways table with just names and highway tags.
    # IF EXISTS so the first run, when the table doesn't exist yet, succeeds.
    util.call_cmd(["psql", DBNAME, "-c", "DROP TABLE IF EXISTS {}".format(TABLE_NAME)])
    util.call_cmd([
        "psql",
        DBNAME,
        "-c",
        "CREATE TABLE {} AS SELECT id, version, tags -> 'name' AS name, tags -> 'highway' AS highway, linestring FROM ways".format(TABLE_NAME)
    ])
    # Export ways into the MBTiles using different zoom levels for different
    # types: low zooms show only major roads, zoom 13 shows everything
    mbtiles_path = "./underfoot_ways.mbtiles"
    if os.path.exists(mbtiles_path):
        os.remove(mbtiles_path)
    zoom_exports = [
        (3, "'motorway'"),
        (7, "'motorway','primary','trunk'"),
        (11, "'motorway','primary','trunk','secondary','tertiary','motorway_link'"),
        (13, None)
    ]
    for min_zoom, highways in zoom_exports:
        source = "postgis://{}:{}@localhost:5432/{}?table={}".format(
            DB_USER,
            DB_PASSWORD,
            DBNAME,
            TABLE_NAME
        )
        if highways:
            # Spaces are percent-encoded because tl parses this as a URL
            source += "&query=(SELECT%20*%20from%20{}%20WHERE%20highway%20in%20({}))%20AS%20foo".format(
                TABLE_NAME,
                highways
            )
        util.call_cmd([
            "./node_modules/tl/bin/tl.js", "copy", "-i", "underfoot_ways.json",
            "-z", str(min_zoom), "-Z", "13",
            source,
            "mbtiles://{}".format(mbtiles_path)
        ])
    return mbtiles_path

if __name__ == "__main__":
    # sys.argv[0] is the script path; the PBF URL is the first real argument
    if len(sys.argv) < 2:
        print("Usage: python osm.py PBF_URL")
        sys.exit(1)
    mbtiles_path = make_ways(sys.argv[1])
    print("Created mbtiles at {}".format(mbtiles_path))
50 changes: 28 additions & 22 deletions osm.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,28 +13,34 @@ psql -d underfoot_ways < /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_b
psql -d underfoot_ways < /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_linestring.sql

# for state in arizona california # if you want to make an osm export for multiple states
for state in california
do
echo ""
echo "Downloading an export of recent $state OSM data (thanks, Geofabrik!)"
if ! [ -e $state-latest.osm.pbf ]
then
curl -o $state-latest.osm.pbf http://download.geofabrik.de/north-america/us/$state-latest.osm.pbf
fi
# Load ways from the PBF into the database
echo ""
echo "Loading data from the PBF into the database"
osmosis \
--read-pbf $state-latest.osm.pbf \
--tf accept-ways highway=* \
--tf reject-ways service=* \
--tf reject-ways footway=sidewalk \
--tf reject-ways highway=proposed \
--tf reject-ways highway=footway \
--tf reject-ways highway=pedestrian \
--tf reject-ways highway=steps \
--write-pgsql database="underfoot_ways" user="underfoot" password="underfoot"
done
# The PBF URL is the first and only argument
pbf_url="$1"
if [ -z "$pbf_url" ]
then
  echo "You must specify a PBF URL"
  exit 1
fi
echo "PBF URL: $pbf_url"
# Cache the download under the URL's basename
filename=$(basename "$pbf_url")
echo "Filename: $filename"
echo ""
echo "Downloading OSM data from $pbf_url (thanks, Geofabrik!)"
# Skip the download if a cached copy already exists
if ! [ -e "$filename" ]
then
  curl -o "$filename" "$pbf_url"
fi
# Load ways from the PBF into the database, keeping only navigable highways
echo ""
echo "Loading data from the PBF into the database"
osmosis \
  --read-pbf "$filename" \
  --tf accept-ways highway=* \
  --tf reject-ways service=* \
  --tf reject-ways footway=sidewalk \
  --tf reject-ways highway=proposed \
  --tf reject-ways highway=footway \
  --tf reject-ways highway=pedestrian \
  --tf reject-ways highway=steps \
  --write-pgsql database="underfoot_ways" user="underfoot" password="underfoot"

# Create a table for ways with just names and highway tags
echo ""
Expand Down
20 changes: 16 additions & 4 deletions packs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,9 @@

import argparse
from rocks import make_rocks
from osm import make_ways
from database import make_database
from sources import util

PACKS = {
"us-ca": {
Expand Down Expand Up @@ -59,32 +61,42 @@
"rock": [
"mf2342c", # Oakland, CA
],
"osm": "http://download.geofabrik.de/north-america/us/california/norcal-latest.osm.pbf"
"osm": "http://download.geofabrik.de/north-america/us/california/norcal-latest.osm.pbf",
"bbox": {
"top": 37.9999225069647,
"bottom": 37.6249329829376,
"left": -122.37608299613,
"right": -122.00107120948
}
}
}

def list_packs():
    """Print the name and description of every available pack."""
    for name, pack in PACKS.items():
        print("\t{}: {}".format(name, pack["description"]))

def make_pack(pack_name, options=None):
    """Build all the data artifacts for the named pack.

    Parameters
    ----------
    pack_name : str
        Key into PACKS identifying the pack to build
    options : argparse.Namespace, optional
        Parsed command-line options; downstream builders consult its
        ``clean`` attribute

    Returns
    -------
    list
        Paths of the files that were created
    """
    make_database()
    pack = PACKS[pack_name]
    # Was `args` (a module global only set when run as a script), which
    # raised NameError when make_pack was imported and called elsewhere
    paths = make_rocks(pack["rock"], options)
    # These should happen last b/c they depend on the spatial scope of the
    # database tables populated above
    paths.append(make_ways(pack["osm"], bbox=pack["bbox"]))
    # TODO Make the contours mbtiles
    util.call_cmd(["./elevation.sh"])
    # TODO zip up all relevant files
    return paths

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Make a data pack for Underfoot")
    parser.add_argument("pack", metavar="PACK_NAME", type=str, help="Make the specified pack. Use `list` to list available packs")
    parser.add_argument("--clean", action="store_true", help="Clean all cached files before building")
    args = parser.parse_args()

    if args.pack == "list":
        print("Available packs:")
        list_packs()
    else:
        print("making pack: {}".format(args.pack))
        # Single call only: the pasted diff showed both the old and new
        # invocations, which would have built the pack twice
        make_pack(args.pack, args)
35 changes: 25 additions & 10 deletions rocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
"""

import os
import shutil
import sys
import re
import psycopg2
import time
Expand Down Expand Up @@ -142,11 +144,14 @@ def clip_source_polygons_by_mask(source_table_name):
))
util.run_sql("DELETE FROM {} WHERE ST_GeometryType(geom) = 'ST_GeometryCollection'".format(source_table_name))

"""Load geological units into the database from the specified sources
Sources must be a list of source names
"""
def load_units(sources):
"""Load geological units into the database from the specified sources
Parameters
----------
sources : list
Names of sources to load
"""
# Drop existing units and masks tables
for table_name in [final_table_name, mask_table_name]:
util.run_sql("DROP TABLE IF EXISTS {}".format(table_name), dbname=DBNAME)
Expand Down Expand Up @@ -263,16 +268,21 @@ def load_units(sources):

print("Database {} created with table {}".format(DBNAME, final_table_name))

def clean_sources(sources):
    """Clean any cached data for specified sources.

    Parameters
    ----------
    sources : list
        Names of source modules in the sources/ directory
    """
    for source_identifier in sources:
        path = os.path.join("sources", "{}.py".format(source_identifier))
        work_path = util.make_work_dir(path)
        # ignore_errors so a source with no cached work dir doesn't abort
        # the whole clean
        shutil.rmtree(work_path, ignore_errors=True)

def make_mbtiles():
# ./node_modules/tl/bin/tl.js copy -i underfoot_units.json -z 7 -Z 14 \
# 'postgis://underfoot:underfoot@localhost:5432/underfoot?table=units' \
# mbtiles://./underfoot_units.mbtiles
path = "./underfoot_units.mbtiles"
"""Export rock units into am MBTiles file"""
path = "./underfoot_rock_units.mbtiles"
cmd = [
"node_modules/tl/bin/tl.js",
"copy",
"-i",
"underfoot_units.json",
"underfoot_rock_units.json",
"-z",
"7",
"-Z",
Expand All @@ -283,7 +293,12 @@ def make_mbtiles():
util.call_cmd(cmd)
return os.path.abspath(path)

def make_rocks(sources, options=None):
    """Build the rock units MBTiles file from the given sources.

    Parameters
    ----------
    sources : list
        Names of sources to load
    options : argparse.Namespace or dict, optional
        When its ``clean`` attribute (or key) is truthy, cached source data
        is removed before loading

    Returns
    -------
    list
        Paths of the files that were created
    """
    # Support both an argparse Namespace and a plain dict; the old
    # `options={}` default crashed on `options.clean` (dicts have no
    # attribute access), and a mutable default is a hazard anyway
    if isinstance(options, dict):
        clean = options.get("clean", False)
    else:
        clean = getattr(options, "clean", False)
    if clean:
        clean_sources(sources)
    load_units(sources)
    mbtiles_path = make_mbtiles()
    return [mbtiles_path]

if __name__ == "__main__":
    # sys.argv[0] is the script path, not a source name — pass only the
    # real arguments as source identifiers
    make_rocks(sys.argv[1:])
26 changes: 26 additions & 0 deletions underfoot_rock_units.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
{
"vector_layers": [
{
"id": "rock_units",
"description": "Geological units",
"fields": {
"code": "String",
"title": "String",
"description": "String",
"lithology": "String",
"lithologies": "Array",
"rock_type": "String",
"formation": "String",
"grouping": "String",
"span": "String",
"min_age": "String",
"max_age": "String",
"est_age": "String",
"source": "String"
},
"maxzoom": 14,
"minzoom": 0
}
],
"attribution": "<a href='http://openstreetmap.org'>OSM contributors</a>"
}

0 comments on commit daa21e5

Please sign in to comment.