Commit 0703e705 authored by Akshat Tandon

Add script for automating lower level tile generation

Input: A text file that lists, for each lower zoom level, the Natural Earth datasets to use (a line starting with '*' gives the zoom level; the lines following it name the datasets for that level)
Output: Vector tiles for the levels described in that file
parent 426fd7ac
*1
ne_110m_land
ne_110m_admin_0_boundary_lines_land
*3
ne_110m_land
ne_110m_admin_0_boundary_lines_land
*5
ne_50m_land
ne_50m_admin_0_boundary_lines_land
ne_50m_admin_1_states_provinces_lines
*7
ne_10m_land
ne_10m_admin_0_boundary_lines_land
ne_10m_admin_1_states_provinces
ne_10m_roads
ne_10m_railroads
ne_10m_rivers_lake_centerlines
ne_10m_lakes
ne_10m_lakes_north_america
ne_10m_lakes_europe
ne_10m_playas
ne_10m_antarctic_ice_shelves_lines
ne_10m_antarctic_ice_shelves_polys
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the Marble Virtual Globe.
#
# This program is free software licensed under the GNU LGPL. You can
# find a copy of this license in LICENSE.txt in the top directory of
# the source code.
#
# Copyright 2016 Akshat Tandon <akshat.tandon@research.iiit.ac.in>
#
"""
Generates low level tiles for Marble using Natural Earth data
"""
import sys
import os
import math
import argparse
import urllib3
import zipfile
sys.path.append('../shp2osm')
sys.path.append('../vectortilecreator')
import polyshp2osm
import vectortilecreator

def unzip_file(filename, in_dir):
    print(in_dir)
    path_zip = os.path.join(in_dir, filename + '.zip')
    with zipfile.ZipFile(path_zip, "r") as zip_ref:
        path_dir = os.path.join(in_dir, filename)
        os.mkdir(path_dir)
        zip_ref.extractall(path_dir)
    os.remove(path_zip)

def generate_url(filename):
    url = 'www.naturalearthdata.com/http//www.naturalearthdata.com/download/'
    cultural_tokens = ['admin', 'populated', 'roads', 'railroads', 'airports', 'ports', 'urban', 'parks', 'time', 'cultural']
    file_tokens = filename.split('_')
    url += file_tokens[1] + '/'
    data_type = 'physical'
    for token in file_tokens:
        if token in cultural_tokens:
            data_type = 'cultural'
            break
    url += data_type + '/'
    url += filename + '.zip'
    return url
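
# Illustrative traces of generate_url() (derived from the function above):
#   generate_url('ne_110m_land')
#     -> 'www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/physical/ne_110m_land.zip'
#   generate_url('ne_10m_roads')
#     -> 'www.naturalearthdata.com/http//www.naturalearthdata.com/download/10m/cultural/ne_10m_roads.zip'
# The second token ('110m', '10m') picks the resolution directory; any cultural
# token such as 'roads' or 'admin' switches the category from 'physical' to 'cultural'.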

def download(filename, in_dir):
    url = generate_url(filename)
    print('Url', url)
    http = urllib3.PoolManager()
    r = http.request('GET', url, preload_content=False)
    chunk_size = 8192
    file_size_dl = 0
    fileSize = int(r.getheader("content-length"))
    with open(os.path.join(in_dir, filename + '.zip'), 'wb') as out:
        while True:
            data = r.read(chunk_size)
            if data is None or len(data) == 0:
                break
            file_size_dl += len(data)
            out.write(data)
            print("Downloading %s: %.1f/%.1f Mb (%3.1f%%)\r" % (filename, file_size_dl / 1024.0 / 1024.0, fileSize / 1024.0 / 1024.0, file_size_dl * 100. / fileSize), end='')
    r.release_conn()
    out.close()
    print("Done")

def parse_file(filename, in_dir):
    level_info = {}
    path = os.path.join(in_dir, filename)
    print('Path', path)
    with open(path, 'r') as f:
        for line in f:
            line = line.rstrip()
            if len(line) == 0:
                continue
            if line[0] == '*':
                level = int(line[1:])
                level_info[level] = []
            else:
                level_info[level].append(line)
    return level_info
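
# For the level file shown at the top of this commit, parse_file() returns a
# dict mapping zoom level to dataset names, e.g.
#   {1: ['ne_110m_land', 'ne_110m_admin_0_boundary_lines_land'],
#    3: ['ne_110m_land', 'ne_110m_admin_0_boundary_lines_land'],
#    5: ['ne_50m_land', 'ne_50m_admin_0_boundary_lines_land', 'ne_50m_admin_1_states_provinces_lines'],
#    7: ['ne_10m_land', 'ne_10m_admin_0_boundary_lines_land', ...]}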

def check_existence(filename, in_dir):
    path = os.path.join(in_dir, filename)
    if not os.path.exists(path):
        download(filename, in_dir)
        unzip_file(filename, in_dir)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Generates low level tiles for Marble using Natural Earth data')
    parser.add_argument('file', help='a text file listing the Natural Earth datasets to use for each level')
    parser.add_argument('-i', '--in_dir', help='directory to read/process data from', default='.')
    parser.add_argument('-o', '--out_dir', help='directory to write tiles to', default='.')
    parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.')
    parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)')
    parser.add_argument('-ow', '--overwrite', action='store_true', help='Create tiles even if they exist already')
    args = parser.parse_args()

    # Datasets that cannot be fetched automatically and must be downloaded by hand
    exception_names = ['ne_50m_admin_1_states_provinces_lines']
    level_info = parse_file(args.file, args.in_dir)
    print("Level Info:", level_info)
    for level in level_info:
        abs_file_paths = []
        for filename in level_info[level]:
            print('Checking - {}'.format(filename))
            if filename not in exception_names:
                check_existence(filename, args.in_dir)
                path = os.path.join(args.in_dir, filename, filename + '.shp')
                abs_file_paths.append(path)
            else:
                print('Download this data manually - {}'.format(filename))
        print('Level has the following SHP datasets:', abs_file_paths)
        polyshp2osm.run(abs_file_paths, 1, 5000000, 'tiny_planet_{}'.format(level))
        print('Tiny planet OSM for level {} complete.'.format(level))
        with open('bound_info_{}'.format(level), "w") as f:
            print('tiny_planet_{}.1.osm;Level;-180.0;-86.0;180.0;86.0'.format(level), file=f)
        vectortilecreator.run(['bound_info_{}'.format(level)], args.cache, args.refresh, args.out_dir, args.overwrite, [level])
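
For reference, the bound_info_<level> file written just above uses the semicolon-separated format that the refactored vectortilecreator.run() below expects: filename;Area Name;west;south;east;north. A minimal sketch of how that file is interpreted, mirroring the reader in the diff below (the level-1 file name is taken from the code above):

import csv

# Sketch only: read the bound_info file the driver writes for level 1.
with open('bound_info_1') as csvfile:
    for bounds in csv.reader(csvfile, delimiter=';', quotechar='|'):
        osm_file = bounds[0]                                  # 'tiny_planet_1.1.osm'
        area_name = bounds[1]                                 # 'Level'
        west, south, east, north = map(float, bounds[2:6])    # -180.0, -86.0, 180.0, 86.0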
polyshp2osm.py
@@ -604,7 +604,17 @@ def run(filenames, slice_count=1, obj_count=5000000, output_location=None, no_so
     """Run the converter. Requires open_file, file_name, id_counter,
     file_counter, counter to be defined in global space; not really a very good
     singleton."""
-    global id_counter, file_counter, counter, file_name, open_file, namespace, non_geom, non_fcla_dict
+    global id_counter, file_counter, counter, file_name, open_file, namespace, non_geom, non_fcla_dict, nodes, ways, relations
+    open_file = None
+    file_name = None
+    id_counter = 1
+    file_counter = 0
+    counter = 0
+    geom_counter = {}
     if output_location:
         file_name = output_location
         start_new_file()
@@ -704,6 +714,9 @@ def run(filenames, slice_count=1, obj_count=5000000, output_location=None, no_so
         write_relation_multipolygon(relation)
     close_file()
+    nodes = []      # (id, lon, lat, tags)
+    ways = []       # (id, node_refs, tags)
+    relations = []  # (id, ways)

 if __name__ == "__main__":
     if DONT_RUN:
...
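
The added re-initialisation matters because the driver script above calls polyshp2osm.run() once per zoom level inside a single Python process, so counters and the node/way/relation lists must not leak from one level into the next. A minimal sketch of that repeated use, with illustrative shapefile paths (the real paths are assembled from --in_dir by the driver):

import polyshp2osm

# Hypothetical back-to-back conversions in one process; the reset inside run()
# keeps OSM ids, file counters and element lists independent between levels.
for level, shp_paths in [(1, ['ne_110m_land/ne_110m_land.shp']),
                         (3, ['ne_110m_land/ne_110m_land.shp'])]:
    polyshp2osm.run(shp_paths, 1, 5000000, 'tiny_planet_{}'.format(level))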
vectortilecreator.py
@@ -51,7 +51,6 @@ def download(url, directory, refresh):
         # else download again
         else:
             return filename
     http = urllib3.PoolManager()
     r = http.request('GET', url, preload_content=False)
     chunk_size = 8192
@@ -73,50 +72,55 @@ def download(url, directory, refresh):
     return filename

-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='Create OSM Vector Tiles for Marble')
-    parser.add_argument('file', nargs='+', help='a file with semicolon separated lines in the form filename.osm.pbf;Area Name;west;south;east;north')
-    parser.add_argument('-o', '--overwrite', action='store_true', help='Create tiles even if they exist already')
-    parser.add_argument('-d', '--directory', help='directory to write tiles to', default='.')
-    parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.')
-    parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)')
-    parser.add_argument('-z', '--zoomLevels', type=int, nargs='+', help='zoom levels to generate', default=[13,15,17])
-    args = parser.parse_args()
-    for csvfilename in args.file:
+def run(filenames, cache, refresh, directory, overwrite, zoomLevels):
+    for csvfilename in filenames:
         with open(csvfilename, 'r') as csvfile:
             reader = csv.reader(csvfile, delimiter=';', quotechar='|')
             for bounds in reader:
-                filename = download(bounds[0], args.cache, args.refresh)
-                for zoom in args.zoomLevels:
+                filename = download(bounds[0], cache, refresh)
+                for zoom in zoomLevels:
                     bottomLeft = deg2num(float(bounds[3]), float(bounds[2]), zoom)
                     topRight = deg2num(float(bounds[5]), float(bounds[4]), zoom)
                     xDiff = topRight[0]-bottomLeft[0]
                     yDiff = bottomLeft[1]-topRight[1]
                     total = xDiff*yDiff
                     count = 0
-                    cutted = "{}/{}.{}-{}-{}-{}.osm.o5m".format(args.cache, filename, bounds[2], bounds[3], bounds[4], bounds[5])
+                    cutted = "{}/{}.{}-{}-{}-{}.osm.o5m".format(cache, filename, bounds[2], bounds[3], bounds[4], bounds[5])
                     if not os.path.exists(cutted):
                         print ("Creating cut out region {}".format(cutted))
-                        call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(bounds[2], bounds[3], bounds[4], bounds[5]), "-o={}".format(cutted), os.path.join(args.cache, filename)])
+                        call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(bounds[2], bounds[3], bounds[4], bounds[5]), "-o={}".format(cutted), os.path.join(cache, filename)])
                     for x in range(1+bottomLeft[0], topRight[0]+1):
                         for y in range(1+topRight[1], bottomLeft[1]+1):
                             count += 1
                             tl = num2deg(x-1, y-1, zoom)
                             br = num2deg(x, y, zoom)
-                            path = "{}/{}/{}".format(args.directory, zoom, x-1)
+                            path = "{}/{}/{}".format(directory, zoom, x-1)
                             target = "{}.o5m".format(y-1)
                             filterTarget = "{}_tmp.o5m".format(y-1)
-                            if not args.overwrite and os.path.exists(os.path.join(path, target)):
+                            if not overwrite and os.path.exists(os.path.join(path, target)):
                                 print("Skipping existing file {}\r".format(os.path.join(path, target)), end='')
                             else:
                                 call(["mkdir", "-p", path])
                                 print ("{} level {}: {}/{} {}\r".format(bounds[1], zoom, count, total, os.path.join(path, target)), end='')
                                 filterLevel = "levels/{}.level".format(zoom)
                                 if os.path.exists(filterLevel):
-                                    call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, filterTarget))])
+                                    call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, filterTarget))])
                                     call(["osmfilter", "--parameter-file={}".format(filterLevel), os.path.join(path, filterTarget), "-o={}".format(os.path.join(path, target))])
                                     os.remove(os.path.join(path, filterTarget))
                                 else:
-                                    call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, target))])
+                                    call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, target))])
                                 call(["chmod", "644", os.path.join(path, target)])
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Create OSM Vector Tiles for Marble')
+    parser.add_argument('file', nargs='+', help='a file with semicolon separated lines in the form filename.osm.pbf;Area Name;west;south;east;north')
+    parser.add_argument('-o', '--overwrite', action='store_true', help='Create tiles even if they exist already')
+    parser.add_argument('-d', '--directory', help='directory to write tiles to', default='.')
+    parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.')
+    parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)')
+    parser.add_argument('-z', '--zoomLevels', type=int, nargs='+', help='zoom levels to generate', default=[13,15,17])
+    args = parser.parse_args()
+    run(args.file, args.cache, args.refresh, args.directory, args.overwrite, args.zoomLevels)
\ No newline at end of file
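
deg2num() and num2deg() are defined elsewhere in vectortilecreator and are not part of this diff; presumably they are the standard OSM slippy-map tile conversions, sketched below for reference (name and behaviour assumed from their use in the loop above).

import math

# Assumed helper: convert latitude/longitude (degrees) to slippy-map tile numbers.
def deg2num(lat_deg, lon_deg, zoom):
    lat_rad = math.radians(lat_deg)
    n = 2 ** zoom
    xtile = int((lon_deg + 180.0) / 360.0 * n)
    ytile = int((1.0 - math.asinh(math.tan(lat_rad)) / math.pi) / 2.0 * n)
    return (xtile, ytile)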