Upgrade SQLDelight and kgtfs
dellisd committed Jul 27, 2023
1 parent bb8cc7b commit 3c4c0a7
Showing 5 changed files with 24 additions and 15 deletions.
2 changes: 1 addition & 1 deletion gradle/libs.versions.toml
@@ -1,7 +1,7 @@
 [versions]
 kotlin = "1.8.10"
 kotlinx-serialization = "1.5.0"
-sqldelight = "2.0.0-SNAPSHOT"
+sqldelight = "2.0.0"
 ktlint = "0.48.2"
 ktor = "2.1.2"
 inject = "0.5.1"
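The catalog bump from 2.0.0-SNAPSHOT to the stable 2.0.0 release is the only version change here; every module that refers to the "sqldelight" version through the catalog picks it up automatically. A minimal sketch of how a build script would consume that entry, with hypothetical alias names since the [plugins] and [libraries] tables are not part of this diff:

// build.gradle.kts (sketch; the alias names below are illustrative, not from this repository)
plugins {
    alias(libs.plugins.sqldelight) // assumes a [plugins] entry with version.ref = "sqldelight"
}

dependencies {
    // assumes a [libraries] entry pointing at an app.cash.sqldelight artifact with version.ref = "sqldelight"
    implementation(libs.sqldelight.coroutines.extensions)
}
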
5 changes: 5 additions & 0 deletions kotlin-js-store/yarn.lock
@@ -17,6 +17,11 @@
   resolved "https://registry.yarnpkg.com/@jlongster/sql.js/-/sql.js-1.6.7.tgz#f5391db62c25a6ba8a162bdcfed6e3392ef2b40f"
   integrity sha512-4hf0kZr5WPoirdR5hUSfQ9O0JpH/qlW1CaR2wZ6zGrDz1xjSdTPuR8AW/oXzIHnJvZSEvlcIE+dfXJZwh/Lxfw==
 
+"@js-joda/core@3.2.0":
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/@js-joda/core/-/core-3.2.0.tgz#3e61e21b7b2b8a6be746df1335cf91d70db2a273"
+  integrity sha512-PMqgJ0sw5B7FKb2d5bWYIoxjri+QlW/Pys7+Rw82jSH0QN3rB05jZ/VrrsUdh1w4+i2kw9JOejXGq/KhDOX7Kg==
+
 "@leichtgewicht/ip-codec@^2.0.1":
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b"
@@ -1,6 +1,6 @@
 package ca.derekellis.reroute.server.cmds
 
-import ca.derekellis.kgtfs.cache.GtfsCache
+import ca.derekellis.kgtfs.GtfsDb
 import ca.derekellis.kgtfs.io.GtfsReader
 import ca.derekellis.reroute.server.data.DataBundler
 import com.github.ajalt.clikt.core.CliktCommand
@@ -39,7 +39,7 @@ class PreprocessCommand : CliktCommand() {
         cachePath.deleteIfExists()
 
         logger.info("Reading {} into cache at {}", source, cachePath)
-        val cache = GtfsCache.fromReader(cachePath, GtfsReader(source))
+        val cache = GtfsDb.fromReader(GtfsReader(source), into = cachePath)
 
         logger.info("Bundling data into {}/{}.json", output, name)
         val bundler = DataBundler()
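In the updated kgtfs API the cache type is GtfsDb rather than GtfsCache, and fromReader takes the reader first, with the destination path passed as a named into parameter. A minimal sketch of the new entry point, using only the calls visible in this diff (the Path-based source and the return type are assumptions based on the surrounding command):

import ca.derekellis.kgtfs.GtfsDb
import ca.derekellis.kgtfs.io.GtfsReader
import java.nio.file.Path

// Sketch: read a GTFS feed and materialize it into a SQLite cache file at cachePath.
fun buildGtfsCache(source: Path, cachePath: Path): GtfsDb =
    GtfsDb.fromReader(GtfsReader(source), into = cachePath)
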
@@ -1,7 +1,8 @@
 package ca.derekellis.reroute.server.data
 
-import ca.derekellis.kgtfs.cache.GtfsCache
-import ca.derekellis.kgtfs.csv.Calendar
+import ca.derekellis.kgtfs.ExperimentalKgtfsApi
+import ca.derekellis.kgtfs.GtfsDb
+import ca.derekellis.kgtfs.csv.ServiceId
 import ca.derekellis.kgtfs.csv.StopId
 import ca.derekellis.kgtfs.ext.TripSequence
 import ca.derekellis.kgtfs.ext.lineString
@@ -12,16 +13,19 @@ import ca.derekellis.reroute.models.Stop
 import ca.derekellis.reroute.models.StopInTimetable
 import ca.derekellis.reroute.models.TransitDataBundle
 import io.github.dellisd.spatialk.geojson.Position
+import org.jetbrains.exposed.sql.select
+import org.jetbrains.exposed.sql.selectAll
 import org.jgrapht.alg.cycle.CycleDetector
 import org.jgrapht.graph.DefaultDirectedGraph
 import org.jgrapht.graph.DefaultEdge
 import org.jgrapht.traverse.TopologicalOrderIterator
 
 class DataBundler {
-    fun assembleDataBundle(gtfs: GtfsCache): TransitDataBundle = gtfs.read {
-        val stops = stops.all().associateBy { it.id }
-        val trips = trips.all().associateBy { it.id }
-        val calendars = calendars.all().map(Calendar::serviceId).toSet()
+    @OptIn(ExperimentalKgtfsApi::class)
+    fun assembleDataBundle(gtfs: GtfsDb): TransitDataBundle = gtfs.query {
+        val stops = Stops.selectAll().map(Stops.Mapper).associateBy { it.id }
+        val trips = Trips.selectAll().map(Trips.Mapper).associateBy { it.id }
+        val calendars = Calendars.selectAll().map { ServiceId(it[Calendars.serviceId]) }.toSet()
         // Get unique sequences
         val sequences = uniqueTripSequences(calendars)
 
@@ -32,12 +36,12 @@
         }.toList()
 
         val processedRoutes = grouped.flatMap { (key, value) ->
-            val route = routes.byId(value.first().gtfsId)
+            val route = Routes.select { Routes.id eq value.first().gtfsId.value }.map(Routes.mapper).single()
             value.mapIndexed { i, sequence ->
                 val trip = trips.getValue(sequence.trips.keys.first())
                 val id = "$key#$i"
                 // TODO: Develop a better way to extract headsign values
-                val shape = shapes.byId(trip.shapeId!!)
+                val shape = Shapes.select { Shapes.id eq trip.shapeId!!.value }.map(Shapes.Mapper)
                 Route(
                     id,
                     route.id.value,
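The bundler now goes through gtfs.query { }, where kgtfs exposes the GTFS tables (Stops, Trips, Calendars, Routes, Shapes) as Exposed table objects with mappers back to the csv models, all gated behind @OptIn(ExperimentalKgtfsApi::class). A minimal sketch of the same pattern in isolation, assuming the table objects resolve inside query { } exactly as they do in DataBundler above (via the lambda receiver or imports not shown in this hunk):

import ca.derekellis.kgtfs.ExperimentalKgtfsApi
import ca.derekellis.kgtfs.GtfsDb
import org.jetbrains.exposed.sql.selectAll

// Sketch: load all stops keyed by id, mirroring the DataBundler change above.
@OptIn(ExperimentalKgtfsApi::class)
fun loadStops(gtfs: GtfsDb) = gtfs.query {
    // Stops.Mapper converts an Exposed ResultRow back into the kgtfs stop model.
    Stops.selectAll().map(Stops.Mapper).associateBy { it.id }
}
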
@@ -52,16 +52,16 @@ class DatabaseHelper(private val worker: Worker, private val client: RerouteClie
     private suspend fun migrateIfNeeded() {
         val oldVersion =
             driver.awaitQuery(null, "PRAGMA $VERSION_PRAGMA", mapper = { cursor ->
-                if (cursor.next()) {
-                    cursor.getLong(0)?.toInt()
+                if (cursor.next().value) {
+                    cursor.getLong(0)
                 } else {
                     null
                 }
-            }, 0) ?: 0
+            }, 0) ?: 0L
 
         val newVersion = RerouteDatabase.Schema.version
 
-        if (oldVersion == 0) {
+        if (oldVersion == 0L) {
             RerouteDatabase.Schema.awaitCreate(driver)
             driver.await(null, "PRAGMA $VERSION_PRAGMA=$newVersion", 0)
         } else if (oldVersion < newVersion) {
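The DatabaseHelper changes track SQLDelight 2.0's driver API: cursor.next() now returns a wrapper whose Boolean is read via .value, and the schema version is handled as a Long, so the .toInt() conversion is dropped and the fallback and comparison use 0L. A minimal sketch of the version read in isolation, with the pragma name as a stand-in (the VERSION_PRAGMA constant's value is not shown in this diff) and awaitQuery assumed to come from SQLDelight's async coroutines extensions:

import app.cash.sqldelight.async.coroutines.awaitQuery
import app.cash.sqldelight.db.SqlDriver

// Stand-in for the VERSION_PRAGMA constant used above; its real value is not shown in the diff.
private const val VERSION_PRAGMA = "user_version"

// Sketch: read the stored schema version, defaulting to 0L when the pragma returns no row.
private suspend fun currentVersion(driver: SqlDriver): Long =
    driver.awaitQuery(null, "PRAGMA $VERSION_PRAGMA", mapper = { cursor ->
        if (cursor.next().value) cursor.getLong(0) else null
    }, 0) ?: 0L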