forked from USACE/mcat-ras
/
tools.go
291 lines (246 loc) · 8.81 KB
/
tools.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
package pgdb
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"log"
"path/filepath"
"strconv"
"strings"
"github.com/Dewberry/mcat-ras/config"
ras "github.com/Dewberry/mcat-ras/tools"
"github.com/go-errors/errors" // warning: replaces standard errors
"github.com/jmoiron/sqlx"
)
// ETLMetaData captures provenance information recorded alongside each model
// record (serialized to JSON in upsertModel) so the origin of the data can be
// traced later.
type ETLMetaData struct {
	ModelName            string `json:"model_name"`             // model name: project file base name with extension stripped
	SourcePath           string `json:"source_path"`            // path/key of the model definition (.prj) file
	DestinationPath      string `json:"destination_path"`       // NOTE(review): never populated in this file — confirm who sets it
	ProjectionSourcePath string `json:"projection_source_path"` // NOTE(review): never populated in this file — confirm who sets it
}
// getCollectionID returns the ID of the collection whose s3 key is LIKE the
// given definition file. The wrapped error carries a stack trace.
func getCollectionID(tx *sqlx.Tx, definitionFile string) (collectionID int, err error) {
	err = tx.Get(&collectionID, getCollectionIDSQL, definitionFile)
	if err != nil {
		return 0, errors.Wrap(err, 0)
	}
	return collectionID, nil
}
// getModelID returns the ID of the model record matching the given definition
// file. The wrapped error carries a stack trace.
func getModelID(tx *sqlx.Tx, definitionFile string) (modelID int, err error) {
	if getErr := tx.Get(&modelID, getModelIDSQL, definitionFile); getErr != nil {
		err = errors.Wrap(getErr, 0)
		return 0, err
	}
	return modelID, nil
}
// upsertModel inserts (or updates) a record in the model table for the given
// RAS model and returns the resulting model ID. The model name is derived
// from the definition file's base name with its extension removed; both the
// model metadata and an ETLMetaData provenance record are stored as JSON.
func upsertModel(tx *sqlx.Tx, rm *ras.RasModel, definitionFile string, collectionID int) (modelID int, err error) {
	base := filepath.Base(definitionFile)
	name := strings.TrimSuffix(base, filepath.Ext(base))

	// Provenance blob recorded next to the model metadata.
	etlJSON, jerr := json.Marshal(ETLMetaData{ModelName: name, SourcePath: definitionFile})
	if jerr != nil {
		return 0, errors.Wrap(jerr, 0)
	}

	metaJSON, jerr := json.Marshal(rm.Metadata)
	if jerr != nil {
		return 0, errors.Wrap(jerr, 0)
	}

	err = tx.Get(&modelID, upsertModelSQL, collectionID, name, rm.Type, definitionFile, metaJSON, etlJSON)
	if err != nil {
		return 0, errors.Wrap(err, 0)
	}
	return modelID, nil
}
// upsertRiver inserts (or updates) a river/reach record for the given vector
// feature and returns its ID.
//
// The feature name is expected in "river, reach" form. Previously a feature
// name without a comma caused an index-out-of-range panic on the split
// result; it now returns a descriptive error instead.
func upsertRiver(tx *sqlx.Tx, river ras.VectorFeature, geometryFileID int) (riverID int, err error) {
	// SplitN(…, 2) keeps any extra commas inside the reach name intact.
	parts := strings.SplitN(river.FeatureName, ",", 2)
	if len(parts) < 2 {
		return 0, errors.Errorf("river feature name %q is not in \"river, reach\" form", river.FeatureName)
	}
	riverName := strings.TrimSpace(parts[0])
	reachName := strings.TrimSpace(parts[1])
	if err := tx.Get(&riverID, upsertRiversSQL, geometryFileID, riverName, reachName, river.Geometry); err != nil {
		return 0, errors.Wrap(err, 0)
	}
	return riverID, nil
}
// upsertModelInfo creates a RAS model object, looks up its collection ID, and
// calls upsertModel to add the model record to the database inside a single
// transaction.
//
// It expects the collection record to already exist in the collection table.
//
// Bug fix: the deferred Rollback was previously registered BEFORE checking
// the error from BeginTxx — on a failed begin, tx is nil and the deferred
// call panics. The error is now checked first.
func upsertModelInfo(definitionFile string, ac *config.APIConfig, db *sqlx.DB) error {
	ctx := context.Background()
	tx, err := db.BeginTxx(ctx, nil)
	if err != nil {
		log.Println(err)
		return errors.Wrap(err, 0)
	}
	// Rollback after a successful Commit is a no-op; this guarantees the
	// transaction is not left idle if any step below fails.
	defer tx.Rollback()

	collectionID, err := getCollectionID(tx, definitionFile)
	if err != nil {
		log.Println("Collection ID:", collectionID, err)
		return errors.Wrap(err, 0)
	}

	rm, err := ras.NewRasModel(definitionFile, *ac.FileStore)
	if err != nil {
		return errors.Wrap(err, 0)
	}

	modelID, err := upsertModel(tx, rm, definitionFile, collectionID)
	if err != nil {
		fmt.Println("Model ID:", modelID, "Name|", definitionFile)
		log.Println("Error: ", err, "Rolling back")
		return errors.Wrap(err, 0)
	}

	err = tx.Commit()
	if err != nil {
		fmt.Println("Model ID:", modelID, "Name|", definitionFile)
		log.Println("Transaction Commit Error|", err)
		return errors.Wrap(err, 0)
	}
	return nil
}
// upsertModelGeometry creates a RAS model object, looks up its model ID, and
// extracts geospatial features from every geometry file, adding records to
// the geometry, river, XS, bank, area, connection, breakline, and BC-line
// tables as needed — all inside a single transaction.
//
// It expects the model record to already exist in the model table. For
// non-geospatial models the function is a no-op (the empty transaction is
// rolled back by the deferred call).
//
// Bug fixes:
//   - the deferred Rollback was registered BEFORE checking the error from
//     BeginTxx; on a failed begin, tx is nil and the deferred call panics.
//   - the type assertions on RiverReachName / xsName / Area fields were
//     unchecked and would panic on a missing or non-string field; they now
//     return descriptive errors.
func upsertModelGeometry(definitionFile string, ac *config.APIConfig, db *sqlx.DB) error {
	ctx := context.Background()
	tx, err := db.BeginTxx(ctx, nil)
	if err != nil {
		log.Println(err)
		return errors.Wrap(err, 0)
	}
	// Rollback after a successful Commit is a no-op; this guarantees the
	// transaction is not left idle if any insert below fails.
	defer tx.Rollback()

	modelID, err := getModelID(tx, definitionFile)
	fmt.Println("Model ID:", modelID, "Name|", definitionFile)
	if err != nil {
		log.Println(err)
		return errors.Wrap(err, 0)
	}

	rm, err := ras.NewRasModel(definitionFile, *ac.FileStore)
	if err != nil {
		return errors.Wrap(err, 0)
	}

	if rm.IsGeospatial() {
		geodata, err := rm.GeospatialData(ac.DestinationCRS)
		if err != nil {
			return errors.Wrap(err, 0)
		}

		// Iterate over geometry files
		for _, geometryFile := range rm.Metadata.GeomFiles {
			var geometryFileID int

			var version interface{} = geometryFile.ProgramVersion
			if geometryFile.ProgramVersion == "" {
				version = sql.NullFloat64{Float64: 0.0, Valid: false}
			} // doing this to prevent SQL error when inserting "" to a numeric field

			// Add Geometry file to database
			if err = tx.Get(&geometryFileID, upsertGeometrySQL,
				modelID,
				geometryFile.Path,
				geometryFile.FileExt,
				geometryFile.GeomTitle,
				version,
				geometryFile.Description); err != nil {
				log.Println("Geometry File", geometryFile.FileExt, "|", err)
				return errors.Wrap(err, 0)
			}

			// Iterate over features in geometry file and add to tables as needed
			geomFileName := filepath.Base(geometryFile.Path)
			features := geodata.Features[geomFileName]

			// Create Dynamic container to map rivers/reaches with xs/banks
			riverIDMap := make(map[string]int, len(features.Rivers))

			// Add all rivers
			for _, river := range features.Rivers {
				riverID, err := upsertRiver(tx, river, geometryFileID)
				if err != nil {
					log.Println(err)
					return errors.Wrap(err, 0)
				}
				riverIDMap[river.FeatureName] = riverID
			}

			// Add all XS
			xsIDMap := make(map[string]int, len(features.XS))
			for _, xs := range features.XS {
				var xsID int
				riverReach, ok := xs.Fields["RiverReachName"].(string)
				if !ok {
					return errors.Errorf("XS %q: RiverReachName field missing or not a string", xs.FeatureName)
				}
				cutLineProfileMatch := xs.Fields["CutLineProfileMatch"]
				// XS feature names are the station values, e.g. "12345.6".
				xsStation, err := strconv.ParseFloat(xs.FeatureName, 64)
				if err != nil {
					log.Println("XS", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
				riverID := riverIDMap[riverReach]
				if err = tx.Get(&xsID, upsertXSSQL, riverID, xsStation, cutLineProfileMatch, xs.Geometry); err != nil {
					log.Println(err)
					return errors.Wrap(err, 0)
				}
				riverReachXSName := fmt.Sprintf("%s-%s", riverReach, xs.FeatureName)
				xsIDMap[riverReachXSName] = xsID
			}

			// Add all Banks
			for _, banks := range features.Banks {
				xsName, ok := banks.Fields["xsName"].(string)
				if !ok {
					return errors.Errorf("banks %q: xsName field missing or not a string", banks.FeatureName)
				}
				riverReachXSName := fmt.Sprintf("%s-%s", banks.Fields["RiverReachName"], xsName)
				xsID := xsIDMap[riverReachXSName]
				bankStation, err := strconv.ParseFloat(banks.FeatureName, 64)
				if err != nil {
					return errors.Wrap(err, 0)
				}
				_, err = tx.Exec(upsertBanksSQL, xsID, bankStation, banks.Geometry)
				if err != nil {
					log.Println("Banks", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
			}

			// Create Dynamic container to map bclines with areas
			areasIDMap := make(map[string]int, (len(features.StorageAreas) + len(features.TwoDAreas)))

			// Add all Storage Areas (is2D = false)
			for _, storageArea := range features.StorageAreas {
				var aID int
				err = tx.Get(&aID, upsertAreasSQL, geometryFileID, storageArea.FeatureName, false, storageArea.Geometry)
				if err != nil {
					log.Println("Storage Areas", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
				areasIDMap[storageArea.FeatureName] = aID
			}

			// Add all 2D Areas (is2D = true)
			for _, twoDArea := range features.TwoDAreas {
				var aID int
				err = tx.Get(&aID, upsertAreasSQL, geometryFileID, twoDArea.FeatureName, true, twoDArea.Geometry)
				if err != nil {
					log.Println("TwoD Areas", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
				areasIDMap[twoDArea.FeatureName] = aID
			}

			// Add all connections
			for _, conn := range features.Connections {
				_, err = tx.Exec(upsertConnectionsSQL, geometryFileID, conn.FeatureName, conn.Fields["Up Area"], conn.Fields["Dn Area"], conn.Geometry)
				if err != nil {
					log.Println("Connections", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
			}

			// Add all breakLines
			for _, bl := range features.BreakLines {
				_, err = tx.Exec(upsertBreaklinesSQL, geometryFileID, bl.FeatureName, bl.Geometry)
				if err != nil {
					log.Println("Breaklines", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
			}

			// Add all boundary condition lines
			for _, bcl := range features.BCLines {
				areaName, ok := bcl.Fields["Area"].(string)
				if !ok {
					return errors.Errorf("BC line %q: Area field missing or not a string", bcl.FeatureName)
				}
				areaID := areasIDMap[areaName]
				_, err = tx.Exec(upsertBClinesSQL, areaID, bcl.FeatureName, bcl.Geometry)
				if err != nil {
					log.Println("BC Lines", geometryFile.FileExt, "|", err)
					return errors.Wrap(err, 0)
				}
			}
		}

		// as there are no insert/update queries outside of the current if statement
		// we are fine to commit the transaction inside the current if statement
		err = tx.Commit()
		if err != nil {
			log.Println("Transaction Commit Error|", err)
			return errors.Wrap(err, 0)
		}
	}
	return nil
}