Skip to content

Commit

Permalink
Merge c25fcab into 55f80f3
Browse files Browse the repository at this point in the history
  • Loading branch information
Mike Graves committed Jul 18, 2019
2 parents 55f80f3 + c25fcab commit 703f41a
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 17 deletions.
10 changes: 3 additions & 7 deletions slingshot/app.py
Expand Up @@ -239,15 +239,11 @@ def publish_layer(bucket, key, geoserver, solr, destination, ogc_proxy,
return layer.name


def publishable_layers(bucket, dynamo):
s3 = session().resource("s3")
db = session().resource("dynamodb")
uploads = s3.Bucket(bucket)
table = db.Table(dynamo)
for page in uploads.objects.pages():
def publishable_layers(bucket, dynamodb):
for page in bucket.objects.pages():
for obj in page:
name = os.path.splitext(obj.key)[0]
res = table.get_item(Key={"LayerName": name})
res = dynamodb.get_item(Key={"LayerName": name})
layer = res.get("Item")
if layer:
l_mod = datetime.fromisoformat(layer['LastMod'])
Expand Down
18 changes: 14 additions & 4 deletions slingshot/cli.py
Expand Up @@ -106,6 +106,8 @@ def initialize(geoserver, geoserver_user, geoserver_password, db_host, db_port,
@click.option('--solr-password', envvar='SOLR_PASSWORD', help="Solr password")
@click.option('--ogc-proxy', envvar='OGC_PROXY',
help="OGC proxy URL")
@click.option('--aws-region', envvar='AWS_DEFAULT_REGION', default='us-east-1',
help="AWS region")
@click.option('--s3-endpoint', envvar='S3_ENDPOINT',
help="If using an alternative S3 service like Minio, set this "
"to the base URL for that service")
Expand All @@ -116,6 +118,9 @@ def initialize(geoserver, geoserver_user, geoserver_password, db_host, db_port,
"for more information.")
@click.option('--dynamo-table',
help="Name of DynamoDB table for tracking state of layer")
@click.option('--dynamo-endpoint',
help="If using an alternative DynamoDB service like moto, set "
"this to the base URL for that service")
@click.option('--upload-bucket', help="Name of S3 bucket for uploaded layers")
@click.option('--storage-bucket', help="Name of S3 bucket for stored layers")
@click.option('--num-workers', default=1,
Expand All @@ -125,8 +130,9 @@ def initialize(geoserver, geoserver_user, geoserver_password, db_host, db_port,
def publish(layers, db_uri, db_user, db_password, db_host, db_port, db_name,
db_schema, geoserver, geoserver_user,
geoserver_password, solr, solr_user, solr_password,
s3_endpoint, s3_alias, dynamo_table, upload_bucket,
storage_bucket, num_workers, publish_all, ogc_proxy):
s3_endpoint, s3_alias, dynamo_endpoint, dynamo_table, aws_region,
upload_bucket, storage_bucket, num_workers, publish_all,
ogc_proxy):
if not any((layers, publish_all)) or all((layers, publish_all)):
raise click.ClickException(
"You must specify either one or more uploaded layer package names "
Expand All @@ -144,9 +150,13 @@ def publish(layers, db_uri, db_user, db_password, db_host, db_port, db_name,
geo_svc = GeoServer(geoserver, HttpSession(), auth=geo_auth,
s3_alias=s3_alias)
solr_svc = Solr(solr, HttpSession(), auth=solr_auth)
dynamodb = session().resource("dynamodb").Table(dynamo_table)
dynamo = session().resource("dynamodb", endpoint_url=dynamo_endpoint,
region_name=aws_region)
s3 = session().resource("s3", endpoint_url=s3_endpoint,
region_name=aws_region)
dynamodb = dynamo.Table(dynamo_table)
if publish_all:
work = publishable_layers(upload_bucket, dynamo_table)
work = publishable_layers(s3.Bucket(upload_bucket), dynamodb)
else:
work = layers
with ThreadPoolExecutor(max_workers=num_workers) as executor:
Expand Down
15 changes: 9 additions & 6 deletions tests/test_app.py
Expand Up @@ -119,17 +119,19 @@ def test_publish_layer_uses_ogc_proxy_url(s3, shapefile, db):


def test_publishable_layers_includes_new_layer(s3, dynamo_table):
    """A layer uploaded to S3 with no DynamoDB record should be publishable.

    The diff rendering had interleaved the stale pre-merge lines (string
    bucket/table names) with the post-merge lines; this keeps only the
    coherent post-merge version, which passes the Bucket resource and
    DynamoDB Table object directly.
    """
    upload = s3.Bucket("upload")
    upload.put_object(Key="foo.zip", Body="Some data")
    # No corresponding item exists in dynamo_table, so the upload is "new"
    # and publishable_layers should yield it.
    layers = list(publishable_layers(upload, dynamo_table))
    assert layers.pop() == "foo.zip"


def test_publishable_layers_includes_updated_layer(s3, dynamo_table):
    """A layer re-uploaded after its recorded publish time should be publishable.

    The diff rendering had interleaved the stale pre-merge call (string
    bucket/table names) with the post-merge lines; this keeps only the
    coherent post-merge version, which passes the Bucket resource and
    DynamoDB Table object directly.
    """
    # Record a publish timestamp far in the past so the fresh S3 upload
    # is newer than the tracked state.
    awhile_ago = datetime(1980, 1, 1).isoformat()
    dynamo_table.put_item(Item={"LayerName": "foo",
                                "LastMod": awhile_ago})
    upload = s3.Bucket("upload")
    upload.put_object(Key="foo.zip", Body="Some data")
    layers = list(publishable_layers(upload, dynamo_table))
    assert layers.pop() == "foo.zip"


Expand All @@ -138,6 +140,7 @@ def test_publishable_layers_skips_old_layer(s3, dynamo_table):
# This test will fail in the year 2080. Probably ok. I'll be dead anyways.
dynamo_table.put_item(Item={"LayerName": "foo",
"LastMod": the_future})
s3.Bucket("upload").put_object(Key="foo.zip", Body="Some data")
layers = list(publishable_layers("upload", "slingshot"))
upload = s3.Bucket("upload")
upload.put_object(Key="foo.zip", Body="Some data")
layers = list(publishable_layers(upload, dynamo_table))
assert not layers

0 comments on commit 703f41a

Please sign in to comment.