Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

+ code: avoid exposing unsafe static methods in datasource_cache ( #1451

)

+ python: remove redundant 'instance' method (mapnik.DatasourceCache)
+ python: reflect plugin_directories method
+ tests: update python usage

TODO: consider using similar approach in FontEngine etc..
TODO: consider returning reference from singleton::instance() to
      safeguard from accidental deleting a 'singleton' pointer
  • Loading branch information...
commit a513d3f97d08e05cb57077202a130146ce7c9145 1 parent 69fb17c
@artemp artemp authored
Showing with 194 additions and 144 deletions.
  1. +34 −34 bindings/python/mapnik/__init__.py
  2. +1 −1  bindings/python/mapnik_datasource.cpp
  3. +65 −14 bindings/python/mapnik_datasource_cache.cpp
  4. +19 −19 include/mapnik/datasource_cache.hpp
  5. +19 −20 src/datasource_cache.cpp
  6. +1 −1  src/deepcopy.cpp
  7. +1 −1  tests/python_tests/cairo_test.py
  8. +1 −1  tests/python_tests/csv_test.py
  9. +9 −9 tests/python_tests/datasource_test.py
  10. +3 −3 tests/python_tests/feature_id_test.py
  11. +1 −1  tests/python_tests/geojson_plugin_test.py
  12. +7 −7 tests/python_tests/map_query_test.py
  13. +2 −2 tests/python_tests/markers_complex_rendering_test.py
  14. +1 −1  tests/python_tests/multi_tile_raster_test.py
  15. +1 −1  tests/python_tests/ogr_and_shape_geometries_test.py
  16. +1 −1  tests/python_tests/ogr_test.py
  17. +1 −1  tests/python_tests/osm_test.py
  18. +1 −1  tests/python_tests/postgis_test.py
  19. +4 −4 tests/python_tests/python_plugin_test.py
  20. +2 −2 tests/python_tests/raster_alpha_test.py
  21. +2 −2 tests/python_tests/raster_colormapped_test.py
  22. +6 −6 tests/python_tests/raster_symbolizer_test.py
  23. +8 −8 tests/python_tests/reprojection_test.py
  24. +1 −1  tests/python_tests/shapefile_test.py
  25. +2 −2 tests/python_tests/sqlite_rtree_test.py
  26. +1 −1  tests/python_tests/sqlite_test.py
View
68 bindings/python/mapnik/__init__.py
@@ -54,7 +54,7 @@ def bootstrap_env():
The settings file should be a python file with an 'env' variable
that declares a dictionary of key:value pairs to push into the
global process environment, if not already set, like:
-
+
env = {'ICU_DATA':'/usr/local/share/icu/'}
"""
if os.path.exists(os.path.join(os.path.dirname(__file__),'mapnik_settings.py')):
@@ -136,18 +136,18 @@ def __repr__(self):
def forward(self, projection):
"""
- Projects the point from the geographic coordinate
- space into the cartesian space. The x component is
- considered to be longitude, the y component the
+ Projects the point from the geographic coordinate
+ space into the cartesian space. The x component is
+ considered to be longitude, the y component the
latitude.
- Returns the easting (x) and northing (y) as a
+ Returns the easting (x) and northing (y) as a
coordinate pair.
- Example: Project the geographic coordinates of the
+ Example: Project the geographic coordinates of the
city center of Stuttgart into the local
- map projection (GK Zone 3/DHDN, EPSG 31467)
- >>> p = Projection('+init=epsg:31467')
+ map projection (GK Zone 3/DHDN, EPSG 31467)
+ >>> p = Projection('+init=epsg:31467')
>>> Coord(9.1, 48.7).forward(p)
Coord(3507360.12813,5395719.2749)
"""
@@ -155,19 +155,19 @@ def forward(self, projection):
def inverse(self, projection):
"""
- Projects the point from the cartesian space
- into the geographic space. The x component is
- considered to be the easting, the y component
+ Projects the point from the cartesian space
+ into the geographic space. The x component is
+ considered to be the easting, the y component
to be the northing.
- Returns the longitude (x) and latitude (y) as a
+ Returns the longitude (x) and latitude (y) as a
coordinate pair.
- Example: Project the cartesian coordinates of the
+ Example: Project the cartesian coordinates of the
city center of Stuttgart in the local
map projection (GK Zone 3/DHDN, EPSG 31467)
into geographic coordinates:
- >>> p = Projection('+init=epsg:31467')
+ >>> p = Projection('+init=epsg:31467')
>>> Coord(3507360.12813,5395719.2749).inverse(p)
Coord(9.1, 48.7)
"""
@@ -175,13 +175,13 @@ def inverse(self, projection):
class _Box2d(Box2d,_injector):
"""
- Represents a spatial envelope (i.e. bounding box).
+ Represents a spatial envelope (i.e. bounding box).
Following operators are defined for Box2d:
Addition:
- e1 + e2 is equivalent to e1.expand_to_include(e2) but yields
+ e1 + e2 is equivalent to e1.expand_to_include(e2) but yields
a new envelope instead of modifying e1
Subtraction:
@@ -191,7 +191,7 @@ class _Box2d(Box2d,_injector):
Multiplication and division change the width and height of the envelope
by the given factor without modifying its center..
- That is, e1 * x is equivalent to:
+ That is, e1 * x is equivalent to:
e1.width(x * e1.width())
e1.height(x * e1.height()),
except that a new envelope is created instead of modifying e1.
@@ -207,8 +207,8 @@ def __repr__(self):
def forward(self, projection):
"""
- Projects the envelope from the geographic space
- into the cartesian space by projecting its corner
+ Projects the envelope from the geographic space
+ into the cartesian space by projecting its corner
points.
See also:
@@ -218,8 +218,8 @@ def forward(self, projection):
def inverse(self, projection):
"""
- Projects the envelope from the cartesian space
- into the geographic space by projecting its corner
+ Projects the envelope from the cartesian space
+ into the geographic space by projecting its corner
points.
See also:
@@ -234,7 +234,7 @@ def __repr__(self):
def forward(self,obj):
"""
- Projects the given object (Box2d or Coord)
+ Projects the given object (Box2d or Coord)
from the geographic space into the cartesian space.
See also:
@@ -245,7 +245,7 @@ def forward(self,obj):
def inverse(self,obj):
"""
- Projects the given object (Box2d or Coord)
+ Projects the given object (Box2d or Coord)
from the cartesian space into the geographic space.
See also:
@@ -331,7 +331,7 @@ def Shapefile(**keywords):
encoding -- file encoding (default 'utf-8')
>>> from mapnik import Shapefile, Layer
- >>> shp = Shapefile(base='/home/mapnik/data',file='world_borders')
+ >>> shp = Shapefile(base='/home/mapnik/data',file='world_borders')
>>> lyr = Layer('Shapefile Layer')
>>> lyr.datasource = shp
@@ -346,7 +346,7 @@ def PostGIS(**keywords):
dbname -- database name to connect to
table -- table name or subselect query
- *Note: if using subselects for the 'table' value consider also
+ *Note: if using subselects for the 'table' value consider also
passing the 'geometry_field' and 'srid' and 'extent_from_subquery'
options and/or specifying the 'geometry_table' option.
@@ -405,7 +405,7 @@ def Raster(**keywords):
tile_stride -- if an image is in tiles, what's the increment between rows/cols (default 1)
>>> from mapnik import Raster, Layer
- >>> raster = Raster(base='/home/mapnik/data',file='elevation.tif',lox=-122.8,loy=48.5,hix=-122.7,hiy=48.6)
+ >>> raster = Raster(base='/home/mapnik/data',file='elevation.tif',lox=-122.8,loy=48.5,hix=-122.7,hiy=48.6)
>>> lyr = Layer('Tiff Layer')
>>> lyr.datasource = raster
@@ -479,7 +479,7 @@ def Ogr(**keywords):
encoding -- file encoding (default 'utf-8')
>>> from mapnik import Ogr, Layer
- >>> datasource = Ogr(base='/home/mapnik/data',file='rivers.geojson',layer='OGRGeoJSON')
+ >>> datasource = Ogr(base='/home/mapnik/data',file='rivers.geojson',layer='OGRGeoJSON')
>>> lyr = Layer('OGR Layer from GeoJSON file')
>>> lyr.datasource = datasource
@@ -507,7 +507,7 @@ def SQLite(**keywords):
use_spatial_index -- boolean, instruct sqlite plugin to use Rtree spatial index (default True)
>>> from mapnik import SQLite, Layer
- >>> sqlite = SQLite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239')
+ >>> sqlite = SQLite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239')
>>> lyr = Layer('SQLite Layer')
>>> lyr.datasource = sqlite
@@ -527,7 +527,7 @@ def Rasterlite(**keywords):
extent -- manually specified data extent (comma delimited string, default None)
>>> from mapnik import Rasterlite, Layer
- >>> rasterlite = Rasterlite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239')
+ >>> rasterlite = Rasterlite(base='/home/mapnik/data',file='osm.db',table='osm',extent='-20037508,-19929239,20037508,19929239')
>>> lyr = Layer('Rasterlite Layer')
>>> lyr.datasource = rasterlite
@@ -547,7 +547,7 @@ def Osm(**keywords):
bbox -- data bounding box for fetching data (default None)
>>> from mapnik import Osm, Layer
- >>> datasource = Osm(file='test.osm')
+ >>> datasource = Osm(file='test.osm')
>>> lyr = Layer('Osm Layer')
>>> lyr.datasource = datasource
@@ -569,7 +569,7 @@ def Kismet(**keywords):
extent -- manually specified data extent (comma delimited string, default None)
>>> from mapnik import Kismet, Layer
- >>> datasource = Kismet(host='localhost',port=2501,extent='-179,-85,179,85')
+ >>> datasource = Kismet(host='localhost',port=2501,extent='-179,-85,179,85')
>>> lyr = Layer('Kismet Server Layer')
>>> lyr.datasource = datasource
@@ -587,7 +587,7 @@ def Geos(**keywords):
extent -- manually specified data extent (comma delimited string, default None)
>>> from mapnik import Geos, Layer
- >>> datasource = Geos(wkt='MULTIPOINT(100 100, 50 50, 0 0)')
+ >>> datasource = Geos(wkt='MULTIPOINT(100 100, 50 50, 0 0)')
>>> lyr = Layer('GEOS Layer from WKT string')
>>> lyr.datasource = datasource
@@ -621,7 +621,7 @@ def __init__(self, envelope=None, geometry_type=None, data_type=None):
def features(self, query):
"""Return an iterable which yields instances of Feature for features within the passed query.
-
+
Required arguments:
query -- a Query instance specifying the region for which features should be returned
"""
@@ -1122,7 +1122,7 @@ def mapnik_version_from_string(version_string):
def register_plugins(path=inputpluginspath):
"""Register plugins located by specified path"""
- DatasourceCache.instance().register_datasources(path)
+ DatasourceCache.register_datasources(path)
def register_fonts(path=fontscollectionpath,valid_extensions=['.ttf','.otf','.ttc','.pfa','.pfb','.ttc','.dfont']):
"""Recursively register fonts using path argument as base directory"""
View
2  bindings/python/mapnik_datasource.cpp
@@ -81,7 +81,7 @@ boost::shared_ptr<mapnik::datasource> create_datasource(const dict& d)
}
}
- return mapnik::datasource_cache::create(params, bind);
+ return mapnik::datasource_cache::instance()->create(params, bind);
}
boost::python::dict describe(boost::shared_ptr<mapnik::datasource> const& ds)
View
79 bindings/python/mapnik_datasource_cache.cpp
@@ -23,25 +23,76 @@
#include <boost/python.hpp>
#include <mapnik/datasource_cache.hpp>
+namespace {
+
+using namespace boost::python;
+
+boost::shared_ptr<mapnik::datasource> create_datasource(const dict& d)
+{
+ bool bind=true;
+ mapnik::parameters params;
+ boost::python::list keys=d.keys();
+ for (int i=0; i<len(keys); ++i)
+ {
+ std::string key = extract<std::string>(keys[i]);
+ object obj = d[key];
+
+ if (key == "bind")
+ {
+ bind = extract<bool>(obj)();
+ continue;
+ }
+
+ extract<std::string> ex0(obj);
+ extract<int> ex1(obj);
+ extract<double> ex2(obj);
+
+ if (ex0.check())
+ {
+ params[key] = ex0();
+ }
+ else if (ex1.check())
+ {
+ params[key] = ex1();
+ }
+ else if (ex2.check())
+ {
+ params[key] = ex2();
+ }
+ }
+
+ return mapnik::datasource_cache::instance()->create(params, bind);
+}
+
+void register_datasources(std::string const& path)
+{
+ mapnik::datasource_cache::instance()->register_datasources(path);
+}
+
+std::vector<std::string> plugin_names()
+{
+ return mapnik::datasource_cache::instance()->plugin_names();
+}
+
+std::string plugin_directories()
+{
+ return mapnik::datasource_cache::instance()->plugin_directories();
+}
+
+}
+
void export_datasource_cache()
{
using mapnik::datasource_cache;
- using mapnik::singleton;
- using mapnik::CreateStatic;
- using namespace boost::python;
- class_<singleton<datasource_cache,CreateStatic>,boost::noncopyable>("Singleton",no_init)
- .def("instance",&singleton<datasource_cache,CreateStatic>::instance,
- return_value_policy<reference_existing_object>())
- .staticmethod("instance")
- ;
-
- class_<datasource_cache,bases<singleton<datasource_cache,CreateStatic> >,
- boost::noncopyable>("DatasourceCache",no_init)
- .def("create",&datasource_cache::create)
+ class_<datasource_cache,
+ boost::noncopyable>("DatasourceCache",no_init)
+ .def("create",&create_datasource)
.staticmethod("create")
- .def("register_datasources",&datasource_cache::register_datasources)
+ .def("register_datasources",&register_datasources)
.staticmethod("register_datasources")
- .def("plugin_names",&datasource_cache::plugin_names)
+ .def("plugin_names",&plugin_names)
.staticmethod("plugin_names")
+ .def("plugin_directories",&plugin_directories)
+ .staticmethod("plugin_directories")
;
}
View
38 include/mapnik/datasource_cache.hpp
@@ -36,28 +36,28 @@
// stl
#include <map>
-namespace mapnik {
-class MAPNIK_DECL datasource_cache :
- public singleton <datasource_cache,CreateStatic>,
- private boost::noncopyable
+namespace mapnik { namespace detail {
+class MAPNIK_DECL datasource_cache_impl
{
- friend class CreateStatic<datasource_cache>;
-private:
- datasource_cache();
- ~datasource_cache();
- datasource_cache(const datasource_cache&);
- datasource_cache& operator=(const datasource_cache&);
- static std::map<std::string,boost::shared_ptr<PluginInfo> > plugins_;
- static bool registered_;
- static bool insert(std::string const& name,const lt_dlhandle module);
- static std::vector<std::string> plugin_directories_;
public:
- static std::vector<std::string> plugin_names();
- static std::string plugin_directories();
- static void register_datasources(std::string const& path);
- static bool register_datasource(std::string const& path);
- static boost::shared_ptr<datasource> create(parameters const& params, bool bind=true);
+ datasource_cache_impl();
+ ~datasource_cache_impl();
+ std::vector<std::string> plugin_names();
+ std::string plugin_directories();
+ void register_datasources(std::string const& path);
+ bool register_datasource(std::string const& path);
+ boost::shared_ptr<datasource> create(parameters const& params, bool bind=true);
+private:
+ std::map<std::string,boost::shared_ptr<PluginInfo> > plugins_;
+ bool registered_;
+ bool insert(std::string const& name,const lt_dlhandle module);
+ std::vector<std::string> plugin_directories_;
+
};
}
+typedef singleton<detail::datasource_cache_impl, CreateStatic> datasource_cache;
+
+}
+
#endif // MAPNIK_DATASOURCE_CACHE_HPP
View
39 src/datasource_cache.cpp
@@ -38,8 +38,7 @@
#include <iostream>
#include <stdexcept>
-namespace mapnik
-{
+namespace mapnik { namespace detail {
bool is_input_plugin (std::string const& filename)
{
@@ -47,21 +46,21 @@ bool is_input_plugin (std::string const& filename)
}
-datasource_cache::datasource_cache()
+datasource_cache_impl::datasource_cache_impl()
{
if (lt_dlinit()) throw std::runtime_error("lt_dlinit() failed");
}
-datasource_cache::~datasource_cache()
+datasource_cache_impl::~datasource_cache_impl()
{
lt_dlexit();
}
-std::map<std::string,boost::shared_ptr<PluginInfo> > datasource_cache::plugins_;
-bool datasource_cache::registered_=false;
-std::vector<std::string> datasource_cache::plugin_directories_;
+//std::map<std::string,boost::shared_ptr<PluginInfo> > datasource_cache::plugins_;
+//bool datasource_cache::registered_=false;
+//std::vector<std::string> datasource_cache::plugin_directories_;
-datasource_ptr datasource_cache::create(const parameters& params, bool bind)
+datasource_ptr datasource_cache_impl::create(const parameters& params, bool bind)
{
boost::optional<std::string> type = params.get<std::string>("type");
if ( ! type)
@@ -71,7 +70,7 @@ datasource_ptr datasource_cache::create(const parameters& params, bool bind)
}
#ifdef MAPNIK_THREADSAFE
- mutex::scoped_lock lock(mutex_);
+ //mutex::scoped_lock lock(mutex_);
#endif
datasource_ptr ds;
@@ -102,34 +101,34 @@ datasource_ptr datasource_cache::create(const parameters& params, bool bind)
}
#ifdef MAPNIK_LOG
- MAPNIK_LOG_DEBUG(datasource_cache) << "datasource_cache: Size=" << params.size();
+ MAPNIK_LOG_DEBUG(datasource_cache_impl) << "datasource_cache: Size=" << params.size();
parameters::const_iterator i = params.begin();
for (; i != params.end(); ++i)
{
- MAPNIK_LOG_DEBUG(datasource_cache) << "datasource_cache: -- " << i->first << "=" << i->second;
+ MAPNIK_LOG_DEBUG(datasource_cache_impl) << "datasource_cache: -- " << i->first << "=" << i->second;
}
#endif
ds = datasource_ptr(create_datasource(params, bind), datasource_deleter());
- MAPNIK_LOG_DEBUG(datasource_cache) << "datasource_cache: Datasource=" << ds << " type=" << type;
+ MAPNIK_LOG_DEBUG(datasource_cache_impl) << "datasource_cache: Datasource=" << ds << " type=" << type;
return ds;
}
-bool datasource_cache::insert(std::string const& type,const lt_dlhandle module)
+bool datasource_cache_impl::insert(std::string const& type,const lt_dlhandle module)
{
return plugins_.insert(make_pair(type,boost::make_shared<PluginInfo>
(type,module))).second;
}
-std::string datasource_cache::plugin_directories()
+std::string datasource_cache_impl::plugin_directories()
{
return boost::algorithm::join(plugin_directories_,", ");
}
-std::vector<std::string> datasource_cache::plugin_names ()
+std::vector<std::string> datasource_cache_impl::plugin_names()
{
std::vector<std::string> names;
std::map<std::string,boost::shared_ptr<PluginInfo> >::const_iterator itr;
@@ -140,11 +139,11 @@ std::vector<std::string> datasource_cache::plugin_names ()
return names;
}
-void datasource_cache::register_datasources(std::string const& str)
+void datasource_cache_impl::register_datasources(std::string const& str)
{
#ifdef MAPNIK_THREADSAFE
- mutex::scoped_lock lock(mapnik::singleton<mapnik::datasource_cache,
- mapnik::CreateStatic>::mutex_);
+ //mutex::scoped_lock lock(mapnik::singleton<mapnik::datasource_cache,
+ // mapnik::CreateStatic>::mutex_);
#endif
boost::filesystem::path path(str);
// TODO - only push unique paths
@@ -175,7 +174,7 @@ void datasource_cache::register_datasources(std::string const& str)
}
}
-bool datasource_cache::register_datasource(std::string const& str)
+bool datasource_cache_impl::register_datasource(std::string const& str)
{
bool success = false;
try
@@ -216,4 +215,4 @@ bool datasource_cache::register_datasource(std::string const& str)
return success;
}
-}
+}}
View
2  src/deepcopy.cpp
@@ -101,7 +101,7 @@ namespace mapnik { namespace util {
parameters p(ds_in->params());
// TODO : re-use datasource extent if already set.
- datasource_ptr ds_out = datasource_cache::create(p);
+ datasource_ptr ds_out = datasource_cache::instance()->create(p);
if (ds_out)
{
lyr_out.set_datasource(ds_out);
View
2  tests/python_tests/cairo_test.py
@@ -10,7 +10,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if mapnik.has_pycairo() and 'sqlite' in mapnik.DatasourceCache.instance().plugin_names():
+if mapnik.has_pycairo() and 'sqlite' in mapnik.DatasourceCache.plugin_names():
def _pycairo_surface(type,sym):
import cairo
View
2  tests/python_tests/csv_test.py
@@ -12,7 +12,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'csv' in mapnik.DatasourceCache.instance().plugin_names():
+if 'csv' in mapnik.DatasourceCache.plugin_names():
def get_csv_ds(filename):
return mapnik.Datasource(type='csv',file=os.path.join('../data/csv/',filename),quiet=True)
View
18 tests/python_tests/datasource_test.py
@@ -11,11 +11,11 @@ def setup():
os.chdir(execution_path('.'))
def test_that_datasources_exist():
- if len(mapnik.DatasourceCache.instance().plugin_names()) == 0:
+ if len(mapnik.DatasourceCache.plugin_names()) == 0:
print '***NOTICE*** - no datasource plugins have been loaded'
def test_field_listing():
- if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Shapefile(file='../data/shp/poly.shp')
fields = ds.fields()
eq_(fields, ['AREA', 'EAS_ID', 'PRFEDEA'])
@@ -26,14 +26,14 @@ def test_field_listing():
eq_(desc['encoding'],'utf-8')
def test_total_feature_count_shp():
- if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Shapefile(file='../data/shp/poly.shp')
features = ds.all_features()
num_feats = len(features)
eq_(num_feats, 10)
def test_total_feature_count_json():
- if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Ogr(file='../data/json/points.json',layer_by_index=0)
desc = ds.describe()
eq_(desc['geometry_type'],mapnik.DataGeometryType.Point)
@@ -45,7 +45,7 @@ def test_total_feature_count_json():
eq_(num_feats, 5)
def test_sqlite_reading():
- if 'sqlite' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'sqlite' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.SQLite(file='../data/sqlite/world.sqlite',table_by_index=0)
desc = ds.describe()
eq_(desc['geometry_type'],mapnik.DataGeometryType.Polygon)
@@ -58,14 +58,14 @@ def test_sqlite_reading():
def test_reading_json_from_string():
json = open('../data/json/points.json','r').read()
- if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Ogr(file=json,layer_by_index=0)
features = ds.all_features()
num_feats = len(features)
eq_(num_feats, 5)
def test_feature_envelope():
- if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Shapefile(file='../data/shp/poly.shp')
features = ds.all_features()
for feat in features:
@@ -76,7 +76,7 @@ def test_feature_envelope():
eq_(intersects, True)
def test_feature_attributes():
- if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'shape' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Shapefile(file='../data/shp/poly.shp')
features = ds.all_features()
feat = features[0]
@@ -86,7 +86,7 @@ def test_feature_attributes():
eq_(ds.field_types(),['float','int','str'])
def test_ogr_layer_by_sql():
- if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Ogr(file='../data/shp/poly.shp', layer_by_sql='SELECT * FROM poly WHERE EAS_ID = 168')
features = ds.all_features()
num_feats = len(features)
View
6 tests/python_tests/feature_id_test.py
@@ -13,7 +13,7 @@ def setup():
os.chdir(execution_path('.'))
def compare_shape_between_mapnik_and_ogr(shapefile,query=None):
- plugins = mapnik.DatasourceCache.instance().plugin_names()
+ plugins = mapnik.DatasourceCache.plugin_names()
if 'shape' in plugins and 'ogr' in plugins:
ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0)
ds2 = mapnik.Shapefile(file=shapefile)
@@ -41,7 +41,7 @@ def test_shapefile_polygon_featureset_id():
def test_shapefile_polygon_feature_query_id():
bbox = (15523428.2632, 4110477.6323, -11218494.8310, 7495720.7404)
query = mapnik.Query(mapnik.Box2d(*bbox))
- if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
ds = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
for fld in ds.fields():
query.add_property_name(fld)
@@ -53,7 +53,7 @@ def test_feature_hit_count():
#bbox = (-14284551.8434, 2074195.1992, -7474929.8687, 8140237.7628)
bbox = (1113194.91,4512803.085,2226389.82,6739192.905)
query = mapnik.Query(mapnik.Box2d(*bbox))
- if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'ogr' in mapnik.DatasourceCache.plugin_names():
ds1 = mapnik.Ogr(file='../data/shp/world_merc.shp',layer_by_index=0)
for fld in ds1.fields():
query.add_property_name(fld)
View
2  tests/python_tests/geojson_plugin_test.py
@@ -11,7 +11,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'geojson' in mapnik.DatasourceCache.instance().plugin_names():
+if 'geojson' in mapnik.DatasourceCache.plugin_names():
def test_geojson_init():
ds = mapnik.Datasource(type='geojson',file='../data/json/escaped.json')
View
14 tests/python_tests/map_query_test.py
@@ -29,14 +29,14 @@ def test_map_query_throw3():
m = mapnik.Map(256,256)
m.query_point(0,0,0)
-if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+if 'shape' in mapnik.DatasourceCache.plugin_names():
# map has never been zoomed (even with data)
@raises(RuntimeError)
def test_map_query_throw4():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml')
m.query_point(0,0,0)
-
+
# invalid coords in general (do not intersect)
@raises(RuntimeError)
def test_map_query_throw5():
@@ -44,7 +44,7 @@ def test_map_query_throw5():
mapnik.load_map(m,'../data/good_maps/agg_poly_gamma_map.xml')
m.zoom_all()
m.query_point(0,9999999999999999,9999999999999999)
-
+
# invalid coords for back projecting
@raises(RuntimeError)
def test_map_query_throw6():
@@ -54,7 +54,7 @@ def test_map_query_throw6():
m.maximum_extent = wgs84_bounds
m.zoom_all()
m.query_point(0,-180,-90)
-
+
def test_map_query_works1():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
@@ -64,7 +64,7 @@ def test_map_query_works1():
fs = m.query_point(0,-11012435.5376, 4599674.6134) # somewhere in kansas
feat = fs.next()
eq_(feat.attributes['NAME_FORMA'],u'United States of America')
-
+
def test_map_query_works2():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
@@ -78,7 +78,7 @@ def test_map_query_works2():
fs = m.query_point(0,-98.9264, 38.1432) # somewhere in kansas
feat = fs.next()
eq_(feat.attributes['NAME'],u'United States')
-
+
def test_map_query_in_pixels_works1():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
@@ -88,7 +88,7 @@ def test_map_query_in_pixels_works1():
fs = m.query_map_point(0,55,100) # somewhere in middle of us
feat = fs.next()
eq_(feat.attributes['NAME_FORMA'],u'United States of America')
-
+
def test_map_query_in_pixels_works2():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
View
4 tests/python_tests/markers_complex_rendering_test.py
@@ -9,7 +9,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'csv' in mapnik.DatasourceCache.instance().plugin_names():
+if 'csv' in mapnik.DatasourceCache.plugin_names():
def test_marker_ellipse_render1():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/marker_ellipse_transform.xml')
@@ -21,7 +21,7 @@ def test_marker_ellipse_render1():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
-
+
def test_marker_ellipse_render2():
# currently crashes https://github.com/mapnik/mapnik/issues/1365
m = mapnik.Map(256,256)
View
2  tests/python_tests/multi_tile_raster_test.py
@@ -13,7 +13,7 @@ def setup():
def test_multi_tile_policy():
srs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('raster')
- if 'raster' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'raster' in mapnik.DatasourceCache.plugin_names():
lyr.datasource = mapnik.Raster(
file = '../data/raster_tiles/${x}/${y}.tif',
lox = -180,
View
2  tests/python_tests/ogr_and_shape_geometries_test.py
@@ -18,7 +18,7 @@ def setup():
"MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))"
]
-plugins = mapnik.DatasourceCache.instance().plugin_names()
+plugins = mapnik.DatasourceCache.plugin_names()
if 'shape' in plugins and 'ogr' in plugins:
def ensure_geometries_are_interpreted_equivalently(filename):
View
2  tests/python_tests/ogr_test.py
@@ -11,7 +11,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'ogr' in mapnik.DatasourceCache.instance().plugin_names():
+if 'ogr' in mapnik.DatasourceCache.plugin_names():
# Shapefile initialization
def test_shapefile_init():
View
2  tests/python_tests/osm_test.py
@@ -11,7 +11,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'osm' in mapnik.DatasourceCache.instance().plugin_names():
+if 'osm' in mapnik.DatasourceCache.plugin_names():
# Shapefile initialization
def test_osm_init():
View
2  tests/python_tests/postgis_test.py
@@ -149,7 +149,7 @@ def postgis_takedown():
# fails as the db is in use: https://github.com/mapnik/mapnik/issues/960
#call('dropdb %s' % MAPNIK_TEST_DBNAME)
-if 'postgis' in mapnik.DatasourceCache.instance().plugin_names() \
+if 'postgis' in mapnik.DatasourceCache.plugin_names() \
and createdb_and_dropdb_on_path() \
and psql_can_connect() \
and shp2pgsql_on_path():
View
8 tests/python_tests/python_plugin_test.py
@@ -21,10 +21,10 @@ def __init__(self):
def features(self, query):
return mapnik.PythonDatasource.wkt_features(
- keys = ('label',),
+ keys = ('label',),
features = (
- ( 'POINT (5 6)', { 'label': 'foo-bar'} ),
- ( 'POINT (60 50)', { 'label': 'buzz-quux'} ),
+ ( 'POINT (5 6)', { 'label': 'foo-bar'} ),
+ ( 'POINT (60 50)', { 'label': 'buzz-quux'} ),
)
)
@@ -94,7 +94,7 @@ def features(self, query):
features = ConcentricCircles(centre, query.bbox, self.step)
)
-if 'python' in mapnik.DatasourceCache.instance().plugin_names():
+if 'python' in mapnik.DatasourceCache.plugin_names():
# make sure we can load from ourself as a module
sys.path.append(execution_path('.'))
View
4 tests/python_tests/raster_alpha_test.py
@@ -10,7 +10,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+if 'gdal' in mapnik.DatasourceCache.plugin_names():
def test_map_alpha_compare():
m = mapnik.Map(600,400)
@@ -23,7 +23,7 @@ def test_map_alpha_compare():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected))
-
+
def test_map_alpha_gradient_compare():
m = mapnik.Map(600,400)
mapnik.load_map(m,'../data/good_maps/raster-alpha-gradient.xml')
View
4 tests/python_tests/raster_colormapped_test.py
@@ -9,7 +9,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+if 'gdal' in mapnik.DatasourceCache.plugin_names():
def test_vrt_rendering():
m = mapnik.Map(512,512)
@@ -22,7 +22,7 @@ def test_vrt_rendering():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected(%s)' % (actual,'tests/python_tests/'+ expected))
-
+
def test_tif_rendering_nodata():
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/tiff_colortable.xml')
View
12 tests/python_tests/raster_symbolizer_test.py
@@ -14,7 +14,7 @@ def setup():
def test_dataraster_coloring():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
- if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'gdal' in mapnik.DatasourceCache.plugin_names():
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
@@ -60,7 +60,7 @@ def test_dataraster_coloring():
def test_dataraster_query_point():
srs = '+init=epsg:32630'
lyr = mapnik.Layer('dataraster')
- if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'gdal' in mapnik.DatasourceCache.plugin_names():
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
@@ -125,7 +125,7 @@ def test_raster_with_alpha_blends_correctly_with_background():
map_layer = mapnik.Layer('test_layer')
filepath = '../data/raster/white-alpha.png'
- if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'gdal' in mapnik.DatasourceCache.plugin_names():
map_layer.datasource = mapnik.Gdal(file=filepath)
map_layer.styles.append('raster_style')
map.layers.append(map_layer)
@@ -145,7 +145,7 @@ def test_raster_warping():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
- if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'gdal' in mapnik.DatasourceCache.plugin_names():
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
@@ -161,7 +161,7 @@ def test_raster_warping():
lyr.styles.append('foo')
_map.layers.append(lyr)
prj_trans = mapnik.ProjTransform(mapnik.Projection(mapSrs),
- mapnik.Projection(lyrSrs))
+ mapnik.Projection(lyrSrs))
_map.zoom_to_box(prj_trans.backward(lyr.envelope()))
im = mapnik.Image(_map.width,_map.height)
@@ -175,7 +175,7 @@ def test_raster_warping_does_not_overclip_source():
lyrSrs = "+init=epsg:32630"
mapSrs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
lyr = mapnik.Layer('dataraster', lyrSrs)
- if 'gdal' in mapnik.DatasourceCache.instance().plugin_names():
+ if 'gdal' in mapnik.DatasourceCache.plugin_names():
lyr.datasource = mapnik.Gdal(
file = '../data/raster/dataraster.tif',
band = 1,
View
16 tests/python_tests/reprojection_test.py
@@ -9,13 +9,13 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+if 'shape' in mapnik.DatasourceCache.plugin_names():
@raises(RuntimeError)
def test_zoom_all_will_fail():
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
m.zoom_all()
-
+
def test_zoom_all_will_work_with_max_extent():
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
@@ -23,14 +23,14 @@ def test_zoom_all_will_work_with_max_extent():
m.maximum_extent = merc_bounds
m.zoom_all()
eq_(m.envelope(),merc_bounds)
-
+
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
merc_bounds = mapnik.Box2d(-20037508.34,-20037508.34,20037508.34,20037508.34)
m.zoom_to_box(merc_bounds)
eq_(m.envelope(),merc_bounds)
-
-
+
+
def test_visual_zoom_all_rendering1():
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/wgs842merc_reprojection.xml')
@@ -44,7 +44,7 @@ def test_visual_zoom_all_rendering1():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
-
+
def test_visual_zoom_all_rendering2():
m = mapnik.Map(512,512)
mapnik.load_map(m,'../data/good_maps/merc2wgs84_reprojection.xml')
@@ -56,7 +56,7 @@ def test_visual_zoom_all_rendering2():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
-
+
# maximum-extent read from map.xml
def test_visual_zoom_all_rendering3():
m = mapnik.Map(512,512)
@@ -69,7 +69,7 @@ def test_visual_zoom_all_rendering3():
im.save(actual)
expected_im = mapnik.Image.open(expected)
eq_(im.tostring(),expected_im.tostring(), 'failed comparing actual (%s) and expected (%s)' % (actual,'tests/python_tests/'+ expected))
-
+
# no maximum-extent
def test_visual_zoom_all_rendering4():
m = mapnik.Map(512,512)
View
2  tests/python_tests/shapefile_test.py
@@ -11,7 +11,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'shape' in mapnik.DatasourceCache.instance().plugin_names():
+if 'shape' in mapnik.DatasourceCache.plugin_names():
# Shapefile initialization
def test_shapefile_init():
View
4 tests/python_tests/sqlite_rtree_test.py
@@ -22,7 +22,7 @@ def create_ds():
ds = mapnik.SQLite(file=DB,table=TABLE)
fs = ds.all_features()
-if 'sqlite' in mapnik.DatasourceCache.instance().plugin_names():
+if 'sqlite' in mapnik.DatasourceCache.plugin_names():
def test_rtree_creation():
@@ -47,7 +47,7 @@ def test_rtree_creation():
conn.commit()
eq_(cur.fetchone()[0],TOTAL)
except sqlite3.OperationalError:
- # don't worry about testing # of index records if
+ # don't worry about testing # of index records if
# python's sqlite module does not support rtree
pass
cur.close()
View
2  tests/python_tests/sqlite_test.py
@@ -10,7 +10,7 @@ def setup():
# from another directory we need to chdir()
os.chdir(execution_path('.'))
-if 'sqlite' in mapnik.DatasourceCache.instance().plugin_names():
+if 'sqlite' in mapnik.DatasourceCache.plugin_names():
def test_attachdb_with_relative_file():
# The point table and index is in the qgis_spatiallite.sqlite
Please sign in to comment.
Something went wrong with that request. Please try again.