HadoopLayerCopier.scala
/*
* Copyright 2016 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package geotrellis.spark.io.hadoop

import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.avro._
import geotrellis.spark.io.index.KeyIndex
import geotrellis.spark.io.json._
import geotrellis.util._

import org.apache.avro.Schema
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import spray.json.JsonFormat
import spray.json.DefaultJsonProtocol._

import scala.reflect.ClassTag

/**
 * Copies a layer stored on HDFS by duplicating its data under `rootPath`
 * and writing fresh layer attributes that point at the new location.
 */
class HadoopLayerCopier(
  rootPath: Path, val attributeStore: AttributeStore)
  (implicit sc: SparkContext) extends LayerCopier[LayerId] {

  def copy[
    K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
    V: AvroRecordCodec: ClassTag,
    M: JsonFormat: GetComponent[?, Bounds[K]]
  ](from: LayerId, to: LayerId): Unit = {
    if (!attributeStore.layerExists(from)) throw new LayerNotFoundError(from)
    if (attributeStore.layerExists(to)) throw new LayerExistsError(to)

    // Read the source layer's attributes; missing attributes mean the layer cannot be read.
    val LayerAttributes(header, metadata, keyIndex, writerSchema) = try {
      attributeStore.readLayerAttributes[HadoopLayerHeader, M, K](from)
    } catch {
      case e: AttributeNotFoundError => throw new LayerReadError(from).initCause(e)
    }

    // Copy the layer data on HDFS, then write attributes pointing at the new path.
    val newPath = new Path(rootPath, s"${to.name}/${to.zoom}")
    HdfsUtils.copyPath(new Path(header.path), newPath, sc.hadoopConfiguration)
    attributeStore.writeLayerAttributes(
      to, header.copy(path = newPath.toUri), metadata, keyIndex, writerSchema
    )
  }
}

object HadoopLayerCopier {
  def apply(
    rootPath: Path,
    attributeStore: AttributeStore
  )(implicit sc: SparkContext): HadoopLayerCopier =
    new HadoopLayerCopier(rootPath, attributeStore)

  def apply(rootPath: Path)(implicit sc: SparkContext): HadoopLayerCopier =
    apply(rootPath, HadoopAttributeStore(rootPath, sc.hadoopConfiguration))

  def apply(attributeStore: HadoopAttributeStore)(implicit sc: SparkContext): HadoopLayerCopier =
    apply(attributeStore.rootPath, attributeStore)
}
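
// A minimal usage sketch, assuming an active SparkContext, the standard
// SpatialKey/Tile/TileLayerMetadata codec imports from geotrellis.spark, and a
// hypothetical catalog at hdfs:///catalog containing a layer "nlcd" at zoom 10:
//
//   implicit val sc: SparkContext = ???  // provided by the Spark application
//   val copier = HadoopLayerCopier(new Path("hdfs:///catalog"))
//   copier.copy[SpatialKey, Tile, TileLayerMetadata[SpatialKey]](
//     LayerId("nlcd", 10), LayerId("nlcd-copy", 10))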