
Commit

Updates to v0.1.0
tmcw committed May 18, 2012
1 parent 4affcbc commit 0ce0245
Showing 8 changed files with 100 additions and 79 deletions.
9 changes: 8 additions & 1 deletion CHANGES.txt
@@ -1 +1,8 @@
-v0.0.1, March 14, 2011 -- Initial release
+### v0.1.0
+
+* Switch tests to nosetest
+* Switch default output to XYZ
+
+### v0.0.1, March 14, 2011
+
+* Initial release
16 changes: 0 additions & 16 deletions Makefile

This file was deleted.

19 changes: 14 additions & 5 deletions README.md
@@ -38,11 +38,20 @@ Import a directory into a `mbtiles` file


MBUtil imports and exports metadata as JSON, in the root of the tile directory, as a file named `metadata.json`.


-{
-    "name": "World Light",
-    "description": "A Test Metadata",
-    "version": "3"
-}
+```javascript
+{
+    "name": "World Light",
+    "description": "A Test Metadata",
+    "version": "3"
+}
+```
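
For reference, a minimal sketch of reading that metadata back after an export. The path assumes the README's `mb-util world.mbtiles tiles` example, so the output directory is named `tiles/`; substitute whatever directory mb-util was pointed at.

```python
import json

# 'tiles/metadata.json' is the file mb-util writes to the root of the
# exported tile directory (directory name taken from the usage example).
with open('tiles/metadata.json') as f:
    metadata = json.load(f)

print(metadata['name'])  # e.g. "World Light"
```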

## Testing

This project uses [nosetests](http://readthedocs.org/docs/nose/en/latest/) for testing. Install nosetests
and run

nosetests


## Authors


61 changes: 29 additions & 32 deletions mb-util
@@ -1,16 +1,16 @@
#!/usr/bin/env python

# MBUtil: a tool for MBTiles files
# Supports importing, exporting, and more
#
-# (c) Development Seed 2011
+# (c) Development Seed 2012
# Licensed under BSD

-import logging
-import os, sys
+import logging, os, sys
from optparse import OptionParser

from mbutil import mbtiles_to_disk, disk_to_mbtiles

if __name__ == '__main__':

    logging.basicConfig(level=logging.DEBUG)
@@ -20,42 +20,39 @@ if __name__ == '__main__':
    Examples:
    Export an mbtiles file to a directory of files:
-    $ mb-util world.mbtiles tiles # tiles must not alredy exist
+    $ mb-util world.mbtiles tiles # tiles must not already exist
    Import a directory of tiles into an mbtiles file:
    $ mb-util tiles world.mbtiles # mbtiles file must not already exist""")

-    # todo - add support for compression
-    #parser.add_option('-w', '--window', dest='window',
-    #    help='compression window size. larger values faster, dangerouser',
-    #    type='int',
-    #    default=2000)

    parser.add_option('--scheme', dest='scheme',
-        help='Tiling scheme for exporting tiles. Default is "tms" (z/x/y), other option is "osm" which is also z/x/y but uses a flipped y coordinate',
+        help='''Tiling scheme for exporting tiles. Default is "xyz" (z/x/y),
+            other option is "tms" which is also z/x/y
+            but uses a flipped y coordinate''',
        type='string',
-        default='tms')
+        default='xyz')

    (options, args) = parser.parse_args()

    # Transfer operations
-    if len(args) == 2:
+    if len(args) != 2:
-        if os.path.isfile(args[0]) and os.path.exists(args[1]):
-            sys.stderr.write('To export MBTiles to disk, specify a directory that does not yet exist\n')
-            sys.exit(1)
-        # to disk
-        if os.path.isfile(args[0]) and not os.path.exists(args[1]):
-            mbtiles_file, directory_path = args
-            mbtiles_to_disk(mbtiles_file, directory_path, **options.__dict__)
-        if os.path.isdir(args[0]) and os.path.isfile(args[1]):
-            sys.stderr.write('Importing tiles into already-existing MBTiles is not yet supported\n')
-            sys.exit(1)
-        # to mbtiles
-        if os.path.isdir(args[0]) and not os.path.isfile(args[0]):
-            directory_path, mbtiles_file = args
-            disk_to_mbtiles(directory_path, mbtiles_file, **options.__dict__)
-    else:
        parser.print_help()
+        sys.exit(1)
+
+    if os.path.isfile(args[0]) and os.path.exists(args[1]):
+        sys.stderr.write('To export MBTiles to disk, specify a directory that does not yet exist\n')
+        sys.exit(1)
+
+    # to disk
+    if os.path.isfile(args[0]) and not os.path.exists(args[1]):
+        mbtiles_file, directory_path = args
+        mbtiles_to_disk(mbtiles_file, directory_path, **options.__dict__)
+
+    if os.path.isdir(args[0]) and os.path.isfile(args[1]):
+        sys.stderr.write('Importing tiles into already-existing MBTiles is not yet supported\n')
+        sys.exit(1)
+
+    # to mbtiles
+    if os.path.isdir(args[0]) and not os.path.isfile(args[0]):
+        directory_path, mbtiles_file = args
+        disk_to_mbtiles(directory_path, mbtiles_file, **options.__dict__)
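
The rewritten script only validates its two path arguments and hands everything else, including the new `--scheme` default of `xyz`, to the library via `**options.__dict__`. A minimal sketch of calling those same functions directly from Python; the file names are placeholders taken from the usage text above, not files shipped with the project.

```python
from mbutil import mbtiles_to_disk, disk_to_mbtiles

# Export an .mbtiles file to a directory of tiles. As in the script above,
# the target directory ('tiles') must not already exist.
mbtiles_to_disk('world.mbtiles', 'tiles', scheme='xyz')

# Pack a directory of tiles into a new .mbtiles file ('copy.mbtiles' is a
# placeholder name; the target file must not already exist).
disk_to_mbtiles('tiles', 'copy.mbtiles', scheme='xyz')
```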
48 changes: 25 additions & 23 deletions mbutil/util.py
@@ -3,10 +3,10 @@
# MBUtil: a tool for MBTiles files
# Supports importing, exporting, and more
#
-# (c) Development Seed 2011
+# (c) Development Seed 2012
# Licensed under BSD

-import sqlite3, uuid, sys, logging, time, os, json, zlib, glob, shutil
+import sqlite3, uuid, sys, logging, time, os, json, zlib

logger = logging.getLogger(__name__)

@@ -21,10 +21,10 @@ def mbtiles_setup(cur):
        tile_row integer,
        tile_data blob);
    """)
    cur.execute("""create table metadata
        (name text, value text);""")
    cur.execute("""create unique index name on metadata (name);""")
    cur.execute("""create unique index tile_index on tiles
        (zoom_level, tile_column, tile_row);""")


def mbtiles_connect(mbtiles_file):
@@ -39,7 +39,7 @@ def mbtiles_connect(mbtiles_file):
def optimize_connection(cur):
    cur.execute("""PRAGMA synchronous=0""")
    cur.execute("""PRAGMA locking_mode=EXCLUSIVE""")
-    cur.execute("""PRAGMA journal_mode=TRUNCATE""")
+    cur.execute("""PRAGMA journal_mode=DELETE""")

def compression_prepare(cur, con):
    cur.execute("""
@@ -49,9 +49,9 @@ def compression_prepare(cur, con):
    """)
    cur.execute("""
        CREATE TABLE if not exists map (
            zoom_level integer,
            tile_column integer,
            tile_row integer,
            tile_id VARCHAR(256));
    """)


@@ -83,8 +83,8 @@ def compression_do(cur, con, chunk):
            if r[3] in files:
                overlapping = overlapping + 1
                start = time.time()
                query = """insert into map
                    (zoom_level, tile_column, tile_row, tile_id)
                    values (?, ?, ?, ?)"""
                logger.debug("insert: %s" % (time.time() - start))
                cur.execute(query, (r[0], r[1], r[2], ids[files.index(r[3])]))
@@ -96,14 +96,14 @@ def compression_do(cur, con, chunk):
                files.append(r[3])

                start = time.time()
                query = """insert into images
                    (tile_id, tile_data)
                    values (?, ?)"""
                cur.execute(query, (str(id), sqlite3.Binary(r[3])))
                logger.debug("insert into images: %s" % (time.time() - start))
                start = time.time()
                query = """insert into map
                    (zoom_level, tile_column, tile_row, tile_id)
                    values (?, ?, ?, ?)"""
                cur.execute(query, (r[0], r[1], r[2], id))
                logger.debug("insert into map: %s" % (time.time() - start))
@@ -118,10 +118,10 @@ def compression_finalize(cur):
        images.tile_data as tile_data FROM
        map JOIN images on images.tile_id = map.tile_id;""")
    cur.execute("""
        CREATE UNIQUE INDEX map_index on map
            (zoom_level, tile_column, tile_row);""")
    cur.execute("""
        CREATE UNIQUE INDEX images_id on images
            (tile_id);""")
    cur.execute("""vacuum;""")
    cur.execute("""analyze;""")

@@ -177,13 +177,12 @@ def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
    logger.debug("Exporting MBTiles to disk")
    logger.debug("%s --> %s" % (mbtiles_file, directory_path))
    con = mbtiles_connect(mbtiles_file)
-    cur = con.cursor()
    os.mkdir("%s" % directory_path)
    metadata = dict(con.execute('select name, value from metadata;').fetchall())
-    json.dump(metadata, open('%s/metadata.json' % directory_path, 'w'),indent=4)
+    json.dump(metadata, open('%s/metadata.json' % directory_path, 'w'), indent=4)
    count = con.execute('select count(zoom_level) from tiles;').fetchone()[0]
    done = 0
-    msg =''
+    msg = ''
    service_version = metadata.get('version', '1.0.0')
    base_path = os.path.join(directory_path,
        service_version,
@@ -222,7 +221,7 @@ def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):

    # grids
    done = 0
-    msg =''
+    msg = ''
    try:
        count = con.execute('select count(zoom_level) from grids;').fetchone()[0]
        grids = con.execute('select zoom_level, tile_column, tile_row, grid from grids;')

@@ -232,8 +231,7 @@ def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
    while g:
        zoom_level = g[0] # z
        tile_column = g[1] # x
-        tile_row = g[2] # y
-        y = g[2]
+        y = g[2] # y
        if kwargs.get('scheme') == 'osm':
            y = flip_y(zoom_level,y)
        grid_dir = os.path.join(base_path, str(zoom_level), str(tile_column))
@@ -243,7 +241,11 @@ def mbtiles_to_disk(mbtiles_file, directory_path, **kwargs):
        f = open(grid, 'w')
        grid_json = json.loads(zlib.decompress(g[3]))
        # join up with the grid 'data' which is in pieces when stored in mbtiles file
-        grid_data_cursor = con.execute('select key_name, key_json FROM grid_data WHERE zoom_level = %(zoom_level)d and tile_column = %(tile_column)d and tile_row = %(tile_row)d;' % locals())
+        grid_data_cursor = con.execute('''select key_name, key_json FROM
+            grid_data WHERE
+            zoom_level = %(zoom_level)d and
+            tile_column = %(tile_column)d and
+            tile_row = %(tile_row)d;''' % locals())
        grid_data = grid_data_cursor.fetchone()
        data = {}
        while grid_data:
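
The export loop above flips the tile row through `flip_y(zoom_level, y)` when a flipped scheme is requested. The body of `flip_y` is not part of this diff; the conventional TMS/XYZ conversion it implies would look like the sketch below, which is an assumption rather than the committed implementation.

```python
def flip_y(zoom, y):
    # TMS and XYZ ("osm") both address tiles as z/x/y, but they number rows
    # from opposite ends of the map, so a row is mirrored within the
    # 2**zoom rows available at that zoom level.
    return (2 ** zoom - 1) - y

assert flip_y(3, 1) == 6             # row 1 of 8 becomes row 6
assert flip_y(3, flip_y(3, 1)) == 1  # flipping twice is the identity
```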
4 changes: 2 additions & 2 deletions setup.py
@@ -2,9 +2,9 @@

setup(
    name='mbutil',
-    version='0.0.2',
+    version='0.1.0',
    author='Tom MacWright',
-    author_email='macwright@gmail.com',
+    author_email='tom@macwright.org',
    packages=['mbutil'],
    scripts=['mb-util'],
    url='https://github.com/mapbox/mbutil',
Binary file added test/data/one_tile.mbtiles
22 changes: 22 additions & 0 deletions test/test.py
@@ -0,0 +1,22 @@
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles

def clear_data():
    try: shutil.rmtree('test/output')
    except Exception: pass

    try: os.path.mkdir('test/output')
    except Exception: pass

@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
    mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
    assert os.path.exists('test/output/1.0.0/shadowplay/0/0/0.png')

@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
    mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
    assert os.path.exists('test/output/1.0.0/shadowplay/0/0/0.png')
    disk_to_mbtiles('test/output/one', 'test/output/one.mbtiles')
    assert os.path.exists('test/output/one.mbtiles')
