
Apply go lints
Pi-Cla committed May 7, 2024
1 parent c8d4e95 commit ddfffcb
Showing 12 changed files with 65 additions and 64 deletions.
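Most of the hunks below share one mechanical change: import blocks are regrouped so that standard-library packages come first, then a blank line, then third-party modules, which is the grouping goimports produces. Reconstructed from the first hunk below, the resulting block in processors/agencyduplicateremover.go reads:

import (
	"fmt"
	"hash/fnv"
	"os"

	"github.com/public-transport/gtfsparser"
	gtfs "github.com/public-transport/gtfsparser/gtfs"
)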
9 changes: 4 additions & 5 deletions processors/agencyduplicateremover.go
@@ -8,10 +8,11 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"hash/fnv"
"os"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// AgencyDuplicateRemover merges semantically equivalent routes
@@ -116,9 +117,7 @@ func (adr *AgencyDuplicateRemover) combineAgencies(feed *gtfsparser.Feed, agenci
}
}

for _, attr := range a.Attributions {
ref.Attributions = append(ref.Attributions, attr)
}
ref.Attributions = append(ref.Attributions, a.Attributions...)

for _, fa := range fareattrs[a] {
if fa.Agency == a {
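The other change in this file collapses an element-by-element copy loop into a single variadic append, the rewrite gosimple reports as S1011 ("use a single append to concatenate two slices"); routeduplicateremover.go below gets the identical treatment. A self-contained sketch of the equivalence, with plain strings standing in for the attribution pointers used here:

package main

import "fmt"

func main() {
	ref := []string{"a"}
	src := []string{"b", "c"}

	// Before: copy one element at a time.
	// for _, attr := range src {
	//     ref = append(ref, attr)
	// }

	// After: a single variadic append produces the same result.
	ref = append(ref, src...)

	fmt.Println(ref) // [a b c]
}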
7 changes: 4 additions & 3 deletions processors/frequencyminizer.go
@@ -8,13 +8,14 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"math"
"os"
"sort"
"strconv"
"sync"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// FrequencyMinimizer minimizes trips, stop_times and frequencies by searching optimal covers for trip times.
@@ -123,7 +124,7 @@ func (m FrequencyMinimizer) Run(feed *gtfsparser.Feed) {
curTrip = new(gtfs.Trip)

var newID string
for true {
for {
newID = t.Id + "_" + strconv.FormatInt(int64(suffixC), 10)
if _, in := feed.Trips[newID]; in {
suffixC++
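Besides the import regrouping, this hunk rewrites "for true { ... }" as the idiomatic "for { ... }" (gosimple S1006); shapeidx.go further down gets the same rewrite. Behaviour is unchanged. A runnable sketch of the surrounding pattern, probing for an unused ID by appending an increasing numeric suffix (freeID and its arguments are illustrative, not this project's API):

package main

import (
	"fmt"
	"strconv"
)

// freeID appends "_1", "_2", ... to base until the result is not yet taken.
func freeID(base string, taken map[string]bool) string {
	suffix := 1
	for { // idiomatic spelling of "for true"
		id := base + "_" + strconv.FormatInt(int64(suffix), 10)
		if !taken[id] {
			return id
		}
		suffix++
	}
}

func main() {
	taken := map[string]bool{"trip_1": true, "trip_2": true}
	fmt.Println(freeID("trip", taken)) // trip_3
}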
13 changes: 7 additions & 6 deletions processors/idminimizer.go
@@ -8,10 +8,11 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"os"
"strconv"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// IDMinimizer minimizes IDs by replacing them be continuous integer
@@ -246,22 +247,22 @@ func (minimizer IDMinimizer) minimizeStopIds(feed *gtfsparser.Feed) {
func (minimizer IDMinimizer) minimizeAttributionIds(feed *gtfsparser.Feed) {
var idCount int64 = 1

for i, _ := range feed.Attributions {
for i := range feed.Attributions {
newId := minimizer.Prefix + strconv.FormatInt(idCount, minimizer.Base)
feed.Attributions[i].Id = newId
idCount = idCount + 1
}

for _, ag := range feed.Agencies {
for i, _ := range ag.Attributions {
for i := range ag.Attributions {
newId := minimizer.Prefix + strconv.FormatInt(idCount, minimizer.Base)
ag.Attributions[i].Id = newId
idCount = idCount + 1
}
}

for _, r := range feed.Routes {
for i, _ := range r.Attributions {
for i := range r.Attributions {
newId := minimizer.Prefix + strconv.FormatInt(idCount, minimizer.Base)
r.Attributions[i].Id = newId
idCount = idCount + 1
@@ -272,7 +273,7 @@ func (minimizer IDMinimizer) minimizeAttributionIds(feed *gtfsparser.Feed) {
if t.Attributions == nil {
continue
}
for i, _ := range *t.Attributions {
for i := range *t.Attributions {
newId := minimizer.Prefix + strconv.FormatInt(idCount, minimizer.Base)
(*t.Attributions)[i].Id = newId
idCount = idCount + 1
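The range loops in this file lose their redundant blank identifiers: "for i, _ := range xs" becomes "for i := range xs" (gosimple S1005). Iterating by index is still needed because the body writes through xs[i] rather than to a copy of the element; servicenonoverlapper.go below receives the same cleanup. A minimal sketch with a hypothetical Attribution type (not the gtfsparser one):

package main

import (
	"fmt"
	"strconv"
)

type Attribution struct{ Id string }

func main() {
	attrs := []Attribution{{"x"}, {"y"}, {"z"}}

	// Index-only range: the value is never read, and indexing lets the loop
	// modify the stored elements instead of copies.
	for i := range attrs {
		attrs[i].Id = "attr_" + strconv.FormatInt(int64(i+1), 10)
	}

	fmt.Println(attrs) // [{attr_1} {attr_2} {attr_3}]
}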
9 changes: 4 additions & 5 deletions processors/routeduplicateremover.go
@@ -9,11 +9,12 @@ package processors
import (
"encoding/binary"
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"hash/fnv"
"os"
"unsafe"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// RouteDuplicateRemover merges semantically equivalent routes
@@ -187,9 +188,7 @@ func (rdr RouteDuplicateRemover) combineRoutes(feed *gtfsparser.Feed, routes []*
}
}

for _, attr := range r.Attributions {
ref.Attributions = append(ref.Attributions, attr)
}
ref.Attributions = append(ref.Attributions, r.Attributions...)

// delete every fare rule that contains this route
for _, fa := range feed.FareAttributes {
7 changes: 4 additions & 3 deletions processors/servicecaldatesremover.go
@@ -9,9 +9,10 @@ package processors
import (
"errors"
"fmt"
"os"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"os"
)

// ServiceCalDatesRemover removes any entry in calendar_dates.txt by
@@ -219,7 +220,7 @@ func (sm *ServiceCalDatesRem) freeTripId(feed *gtfsparser.Feed, prefix string) s
return tid
}
}
panic(errors.New("Ran out of free trip ids."))
panic(errors.New("ran out of free trip ids"))
}

// get a free service id with the given prefix
@@ -231,5 +232,5 @@ func (sm *ServiceCalDatesRem) freeServiceId(feed *gtfsparser.Feed, prefix string
return sid
}
}
panic(errors.New("Ran out of free service ids."))
panic(errors.New("ran out of free service ids"))
}
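The two panic messages are reworded to follow the Go error-string convention (checked by staticcheck as ST1005): start lowercase, no trailing period, because error strings are routinely wrapped into longer messages; shapesnapper.go at the bottom gets the same fix. A small illustration:

package main

import (
	"errors"
	"fmt"
)

func main() {
	// Before: errors.New("Ran out of free trip ids.")
	// After: lowercase and unpunctuated, so it composes cleanly when wrapped.
	err := errors.New("ran out of free trip ids")
	fmt.Println(fmt.Errorf("processor: %w", err)) // processor: ran out of free trip ids
}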
13 changes: 7 additions & 6 deletions processors/serviceduplicateremover.go
@@ -9,10 +9,11 @@ package processors
import (
"encoding/binary"
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"hash/fnv"
"os"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// ServiceDuplicateRemover removes duplicate services. Services are considered equal if they
@@ -47,7 +48,7 @@ func (sdr ServiceDuplicateRemover) Run(feed *gtfsparser.Feed) {
}

sc := amaps[s]
eqServices := sdr.getEquivalentServices(s, amaps, feed, chunks[sc.hash])
eqServices := sdr.getEquivalentServices(s, amaps, chunks[sc.hash])

if len(eqServices) > 0 {
sdr.combineServices(feed, append(eqServices, s), trips)
@@ -65,7 +66,7 @@ func (sdr ServiceDuplicateRemover) Run(feed *gtfsparser.Feed) {
}

// Return the services that are equivalent to service
func (m ServiceDuplicateRemover) getEquivalentServices(serv *gtfs.Service, amaps map[*gtfs.Service]ServiceCompressed, feed *gtfsparser.Feed, chunks [][]*gtfs.Service) []*gtfs.Service {
func (m ServiceDuplicateRemover) getEquivalentServices(serv *gtfs.Service, amaps map[*gtfs.Service]ServiceCompressed, chunks [][]*gtfs.Service) []*gtfs.Service {
rets := make([][]*gtfs.Service, len(chunks))
sem := make(chan empty, len(chunks))

@@ -127,7 +128,7 @@ func (m ServiceDuplicateRemover) getActiveMaps(feed *gtfsparser.Feed) map[*gtfs.
cur.start = first
cur.end = last
cur.activeMap = sm.getActiveOnMap(first.GetTime(), last.GetTime(), s)
cur.hash = m.serviceHash(cur.activeMap, first, last, s)
cur.hash = m.serviceHash(cur.activeMap, first, last)

rets[j][s] = cur
}
@@ -177,7 +178,7 @@ func (m ServiceDuplicateRemover) getServiceChunks(feed *gtfsparser.Feed, amaps m
return chunks
}

func (m ServiceDuplicateRemover) serviceHash(active []bool, first gtfs.Date, last gtfs.Date, s *gtfs.Service) uint32 {
func (m ServiceDuplicateRemover) serviceHash(active []bool, first gtfs.Date, last gtfs.Date) uint32 {
h := fnv.New32a()

bls := boolsToBytes(active)
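Beyond the import regrouping, this file drops parameters that the function bodies never read: getEquivalentServices loses its feed argument and serviceHash its trailing *gtfs.Service, the kind of finding the unparam linter reports. All call sites are updated in the same commit, so behaviour is unchanged; serviceminimizer.go and shapeduplicateremover.go below contain the same sort of signature trim. A generic sketch of the refactor (hash and its former extra parameter are hypothetical, not this codebase's serviceHash):

package main

import (
	"fmt"
	"hash/fnv"
)

// Before (sketch): hash(active []bool, extra *Service) uint32, where extra was never used.
// After: the unused parameter is removed and every caller is updated to match.
func hash(active []bool) uint32 {
	h := fnv.New32a()
	for _, b := range active {
		if b {
			h.Write([]byte{1})
		} else {
			h.Write([]byte{0})
		}
	}
	return h.Sum32()
}

func main() {
	fmt.Println(hash([]bool{true, false, true}))
}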
9 changes: 5 additions & 4 deletions processors/serviceminimizer.go
@@ -8,10 +8,11 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"os"
"time"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// ServiceMinimizer minimizes services by finding optimal calendar.txt and
@@ -162,7 +163,7 @@ out:
continue
}

c := sm.countExceptions(service, activeOn, d, startDiff, endDiff, a, b, e)
c := sm.countExceptions(activeOn, d, startDiff, endDiff, a, b, e)

if c < e {
e = c
@@ -182,7 +183,7 @@ out:
sm.updateService(service, bestMap, bestA, bestB, startTime, endTime, start, end)
}

func (sm ServiceMinimizer) countExceptions(s *gtfs.Service, actmap []bool, bm uint, startDiff int, endDiff int, a int, b int, max uint) uint {
func (sm ServiceMinimizer) countExceptions(actmap []bool, bm uint, startDiff int, endDiff int, a int, b int, max uint) uint {
ret := uint(0)
l := len(actmap)

19 changes: 10 additions & 9 deletions processors/servicenonoverlapper.go
@@ -8,12 +8,13 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"golang.org/x/exp/slices"
"os"
"sort"
"strconv"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"golang.org/x/exp/slices"
)

type DayType struct {
@@ -54,16 +55,16 @@ func (sm ServiceNonOverlapper) Run(feed *gtfsparser.Feed) {
}
}

for wd, _ := range days {
for day, _ := range days[wd] {
for wd := range days {
for day := range days[wd] {
sort.Slice(days[wd][day], func(i, j int) bool {
return days[wd][day][i].Id < days[wd][day][j].Id
})
}
}

// collect day types
for wd, _ := range days {
for wd := range days {
for day, trips := range days[wd] {
found := false
for i, existing := range day_types[wd] {
@@ -82,7 +83,7 @@
return len(day_types[wd][i].Dates) > len(day_types[wd][j].Dates)
})

for i, _ := range day_types[wd] {
for i := range day_types[wd] {
sort.Slice(day_types[wd][i].Dates, func(a, b int) bool {
return day_types[wd][i].Dates[a].GetTime().Before(day_types[wd][i].Dates[b].GetTime())
})
@@ -95,7 +96,7 @@
feed.StopTimesAddFlds = make(map[string]map[string]map[int]string)

// write services
for wd, _ := range days {
for wd := range days {
for _, t := range day_types[wd] {
weeknums := make([]int, 0)
for _, d := range t.Dates {
@@ -108,7 +109,7 @@
if len(day_types[wd]) > 1 {
id += " ("

for i, _ := range weeknums {
for i := range weeknums {
if i == 0 {
id += sm.YearWeekName + strconv.Itoa((weeknums[i]))
continue
9 changes: 5 additions & 4 deletions processors/shapeduplicateremover.go
@@ -8,10 +8,11 @@ package processors

import (
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"math"
"os"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// ShapeDuplicateRemover removes duplicate shapes
@@ -70,7 +71,7 @@ func (sdr ShapeDuplicateRemover) Run(feed *gtfsparser.Feed) {
if sdr.deleted[s] {
continue
}
eqShps := sdr.getEquShps(s, feed, chunkIdxs)
eqShps := sdr.getEquShps(s, chunkIdxs)

if len(eqShps) > 0 {
sdr.combineShapes(feed, append(eqShps, s), tidx)
@@ -83,7 +84,7 @@ func (sdr ShapeDuplicateRemover) Run(feed *gtfsparser.Feed) {
}

// Return all shapes that are equivalent (within MaxEqDist) to shape
func (sdr *ShapeDuplicateRemover) getEquShps(shp *gtfs.Shape, feed *gtfsparser.Feed, idxs []*ShapeIdx) []*gtfs.Shape {
func (sdr *ShapeDuplicateRemover) getEquShps(shp *gtfs.Shape, idxs []*ShapeIdx) []*gtfs.Shape {
rets := make([][]*gtfs.Shape, len(idxs))
sem := make(chan empty, len(idxs))

5 changes: 3 additions & 2 deletions processors/shapeidx.go
@@ -7,8 +7,9 @@
package processors

import (
gtfs "github.com/public-transport/gtfsparser/gtfs"
"math"

gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// ShapeIdx stores objects for fast nearest-neighbor
@@ -137,7 +138,7 @@ func (gi *ShapeIdx) isects(x0, y0, x1, y1 float64, x, y uint) bool {
ocode1 := gi.ocode(x1, y1, xmin, ymin, xmax, ymax)
isect := false

for true {
for {
if (ocode0 | ocode1) == 0 {
return true
} else if (ocode0 & ocode1) != 0 {
8 changes: 5 additions & 3 deletions processors/shapesnapper.go
@@ -9,10 +9,11 @@ package processors
import (
"errors"
"fmt"
"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
"math"
"os"

"github.com/public-transport/gtfsparser"
gtfs "github.com/public-transport/gtfsparser/gtfs"
)

// ShapeMinimizer minimizes shapes.
@@ -88,6 +89,7 @@ func (sm ShapeSnapper) Run(feed *gtfsparser.Feed) {
func (sm *ShapeSnapper) snapTo(stop *gtfs.Stop, distT float32, shape *gtfs.Shape) (float64, float64) {
shp := sm.mercs[shape]

// TODO No value is equal to NaN not even NaN itself so is this if statement redundant?
if float64(distT) != math.NaN() {
for i := 1; i < len(shape.Points); i++ {
if shape.Points[i].Dist_traveled <= distT && i < len(shape.Points) - 1 && shape.Points[i+1].Dist_traveled >= distT {
@@ -134,5 +136,5 @@ func (sm *ShapeSnapper) freeStopId(feed *gtfsparser.Feed, suffix string) string
return sid
}
}
panic(errors.New("Ran out of free stop ids."))
panic(errors.New("ran out of free stop ids"))
}
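The TODO added in snapTo flags a real gotcha rather than a style issue: NaN compares unequal to everything, including itself, so "float64(distT) != math.NaN()" is always true and the guard filters nothing out. If the intent is to skip stops without a usable shape_dist_traveled, the check would need math.IsNaN (or the x != x trick). A standalone illustration, not code from this repository:

package main

import (
	"fmt"
	"math"
)

func main() {
	d := math.NaN()

	// Always true: NaN is unequal to everything, even another NaN.
	fmt.Println(d != math.NaN()) // true

	// The reliable test:
	fmt.Println(math.IsNaN(d)) // true

	// Equivalent without the math package: NaN is the only value for which x != x.
	fmt.Println(d != d) // true
}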
