- // SPDX-FileCopyrightText: Adam Evyčędo
- //
- // SPDX-License-Identifier: AGPL-3.0-or-later
- package traffic
- import (
- "apiote.xyz/p/szczanieckiej/config"
- "apiote.xyz/p/szczanieckiej/file"
- traffic_errors "apiote.xyz/p/szczanieckiej/traffic/errors"
- "apiote.xyz/p/szczanieckiej/transformers"
- "errors"
- "fmt"
- "io"
- "log"
- "net"
- "os"
- "path/filepath"
- "slices"
- "strings"
- "time"
- "golang.org/x/text/language"
- "golang.org/x/text/runes"
- "golang.org/x/text/transform"
- "git.sr.ht/~sircmpwn/go-bare"
- "github.com/dhconnelly/rtreego"
- "github.com/sahilm/fuzzy"
- "notabug.org/apiote/gott"
- )
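- // _Result carries the intermediate state threaded through the gott.Result
- // pipelines below: the file being read, its offset, and the values
- // unmarshalled from it.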
- type _Result struct {
- Filename string
- Offset uint
- Date time.Time
- LineID string
- TimetableHome string
- Calendar []Schedule
- DeparturesType DeparturesType
- Vehicles Vehicles
- Feed Feed
- Ctx Context
- Traffic *Traffic
- Languages []language.Tag
- Location *time.Location
- Datetime time.Time
- MinuteB4Datetime time.Time
- TodaySchedule map[string]struct{}
- YesterdaySchedule map[string]struct{}
- file *os.File
- TripsFile *os.File
- Trips map[string]Trip
- Departures []DepartureRealtime
- DeparturesSchedule []Departure
- Stop Stop
- Line Line
- Trip Trip
- FeedInfo FeedInfo
- }
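- // isTimeout reports whether err is a net.Error caused by a timeout.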
- func isTimeout(err error) bool {
- var e net.Error
- return errors.As(err, &e) && e.Timeout()
- }
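- // CleanQuery strips non-alphanumeric runes from query, applies the
- // feed-specific transformer, and lowercases the result.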
- func CleanQuery(query string, feed Feed) (string, error) {
- t := transform.Chain(runes.Remove(runes.Predicate(transformers.IsNonAlphanum)), feed.Transformer())
- queryCleaned, _, err := transform.String(t, query)
- return strings.ToLower(queryCleaned), err
- }
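- // findSchedule returns the ids of the schedules in calendar that are active on
- // the given date and weekday; it returns a NoSchedule error when none are.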
- func findSchedule(home string, time time.Time, calendar []Schedule) (map[string]struct{},
- error) {
- schedules := map[string]struct{}{}
- weekday := uint8(1 << time.Weekday())
- date := time.Format(DateFormat)
- for _, schedule := range calendar {
- for _, dateRange := range schedule.DateRanges {
- if dateRange.Start <= date && date <= dateRange.End &&
- (dateRange.Weekdays&weekday != 0) {
- schedules[schedule.Id] = struct{}{}
- break
- }
- }
- }
- var err error
- if len(schedules) == 0 {
- err = traffic_errors.NoSchedule{Date: date}
- }
- return schedules, err
- }
- func openFile(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- file, err := os.Open(filepath.Join(result.TimetableHome, result.Filename))
- result.file = file
- return result, err
- }
- func seek(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- _, err := result.file.Seek(int64(result.Offset), 0)
- return result, err
- }
- func unmarshalStop(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- result.Stop = Stop{}
- err := bare.UnmarshalReader(result.file, &result.Stop)
- result.file.Close()
- return result, err
- }
- func unmarshalFeedInfo(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- result.FeedInfo = FeedInfo{}
- err := bare.UnmarshalReader(result.file, &result.FeedInfo)
- result.file.Close()
- return result, err
- }
- func unmarshalLine(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- result.Line = Line{}
- err := bare.UnmarshalReader(result.file, &result.Line)
- result.file.Close()
- return result, err
- }
- func unmarshalTrip(input ...interface{}) (interface{}, error) {
- result := input[0].(_Result)
- result.Trip = Trip{}
- err := bare.UnmarshalReader(result.file, &result.Trip)
- return result, err
- }
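- // GetTimeWithDelay returns the effective departure time: the realtime update
- // time (in UTC or local time, format 150405) when one is present, otherwise
- // the scheduled time shifted by the reported delay in seconds.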
- func GetTimeWithDelay(departure DepartureRealtime) time.Time {
- if departure.Update.TimeUTC != "" {
- updateTimeUTC, err := time.Parse("150405", departure.Update.TimeUTC)
- if err != nil {
- panic("departure update time ‘" + departure.Update.TimeUTC + "’ not in format 150405")
- }
- updateTime := time.Date(departure.Time.Year(), departure.Time.Month(), departure.Time.Day(), updateTimeUTC.Hour(), updateTimeUTC.Minute(), updateTimeUTC.Second(), 0, time.UTC)
- return updateTime.In(departure.Time.Location())
- } else if departure.Update.Time != "" {
- updateTime, err := time.Parse("150405", departure.Update.Time)
- if err != nil {
- panic("departure update time ‘" + departure.Update.Time + "’ not in format 150405")
- }
- updateDateTime := time.Date(departure.Time.Year(), departure.Time.Month(), departure.Time.Day(), updateTime.Hour(), updateTime.Minute(), updateTime.Second(), 0, departure.Time.Location())
- return updateDateTime
- } else {
- delay := int(departure.Update.Delay)
- return departure.Time.Add(time.Duration(delay) * time.Second)
- }
- }
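- // unmarshalCodeIndex reads a BARE-encoded map of codes to file offsets.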
- func unmarshalCodeIndex(timetableHome, filename string) (CodeIndex, error) {
- ix := CodeIndex{}
- ixFile, err := os.Open(filepath.Join(timetableHome, filename))
- if err != nil {
- return ix, fmt.Errorf("while opening file: %w", err)
- }
- defer ixFile.Close()
- r := bare.NewReader(ixFile)
- num, err := r.ReadUint()
- if err != nil {
- return ix, fmt.Errorf("while reading length: %w", err)
- }
- for i := uint64(0); i < num; i++ {
- k, err := r.ReadString()
- if err != nil {
- return ix, fmt.Errorf("while reading key at %d: %w", i, err)
- }
- v, err := r.ReadUint()
- if err != nil {
- return ix, fmt.Errorf("while reading value at %d: %w", i, err)
- }
- ix[k] = uint(v)
- }
- return ix, nil
- }
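- // unmarshalNameIndex reads consecutive BARE-encoded NameOffset records until EOF.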
- func unmarshalNameIndex(timetableHome, filename string) (NameIndex, error) {
- ix := NameIndex{}
- ixFile, err := os.Open(filepath.Join(timetableHome, filename))
- if err != nil {
- return ix, fmt.Errorf("while opening file: %w", err)
- }
- defer ixFile.Close()
- for err == nil {
- nameOffset := NameOffset{}
- err = bare.UnmarshalReader(ixFile, &nameOffset)
- if err != nil {
- if err == io.EOF {
- break
- } else {
- return ix, fmt.Errorf("while unmarshaling: %w", err)
- }
- }
- ix = append(ix, nameOffset)
- }
- return ix, nil
- }
- func unmarshalStopCodeIndex(timetableHome string) (CodeIndex, error) {
- return unmarshalCodeIndex(timetableHome, "ix_stop_codes.bare")
- }
- func unmarshalLineCodeIndex(timetableHome string) (CodeIndex, error) {
- return unmarshalCodeIndex(timetableHome, "ix_line_codes.bare")
- }
- func unmarshalLineIndex(timetableHome string) (NameIndex, error) {
- return unmarshalNameIndex(timetableHome, "ix_lines.bare")
- }
- func unmarshalStopNameIndex(timetableHome string) (NameIndex, error) {
- return unmarshalNameIndex(timetableHome, "ix_stop_names.bare")
- }
- func unmarshalTripIndex(timetableHome string) (NameIndex, error) {
- return unmarshalNameIndex(timetableHome, "ix_trips.bare")
- }
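- // readIndexes loads the stop code, stop name, line name, line id, and trip
- // indexes for every version of a feed, keyed by validity.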
- func readIndexes(feedHome string, versions []Version) (FeedCodeIndex,
- FeedNameIndex, FeedNameIndex, FeedCodeIndex, FeedNameIndex, error) {
- codeIndex := FeedCodeIndex{}
- nameIndex := FeedNameIndex{}
- lineIndex := FeedNameIndex{}
- tripIndex := FeedNameIndex{}
- lineIdIndex := FeedCodeIndex{}
- for _, v := range versions {
- validity := Validity(v.String())
- timetableHome := filepath.Join(feedHome, string(validity))
- cIx, err := unmarshalStopCodeIndex(timetableHome)
- if err != nil {
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex,
- fmt.Errorf("while unmarshalling code index: %w", err)
- }
- liIx, err := unmarshalLineCodeIndex(timetableHome)
- if err != nil {
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex,
- fmt.Errorf("while unmarshalling code index: %w", err)
- }
- nIx, err := unmarshalStopNameIndex(timetableHome)
- if err != nil {
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex,
- fmt.Errorf("while unmarshalling name index: %w", err)
- }
- lIx, err := unmarshalLineIndex(timetableHome)
- if err != nil {
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex,
- fmt.Errorf("while unmarshalling line index: %w", err)
- }
- tIx, err := unmarshalTripIndex(timetableHome)
- if err != nil {
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex,
- fmt.Errorf("while unmarshalling trip index: %w", err)
- }
- codeIndex[validity] = cIx
- nameIndex[validity] = nIx
- lineIndex[validity] = lIx
- lineIdIndex[validity] = liIx
- tripIndex[validity] = tIx
- }
- return codeIndex, nameIndex, lineIndex, lineIdIndex, tripIndex, nil
- }
- func unmarshalCalendar(timetableHome string) ([]Schedule, error) {
- calendar := []Schedule{}
- calendarFile, err := os.Open(filepath.Join(timetableHome, "calendar.bare"))
- if err != nil {
- return calendar, fmt.Errorf("while opening file: %w", err)
- }
- defer calendarFile.Close()
- for err == nil {
- schedule := Schedule{}
- err = bare.UnmarshalReader(calendarFile, &schedule)
- if err != nil {
- if err == io.EOF {
- break
- } else {
- return calendar, fmt.Errorf("while unmarshaling: %w", err)
- }
- }
- calendar = append(calendar, schedule)
- }
- return calendar, nil
- }
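- // readCalendar loads calendar.bare for every version of a feed, keyed by validity.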
- func readCalendar(feedHome string, versions []Version) (FeedCalendar, error) {
- calendars := FeedCalendar{}
- for _, v := range versions {
- validity := Validity(v.String())
- timetableHome := filepath.Join(feedHome, string(validity))
- schedule, err := unmarshalCalendar(timetableHome)
- if err != nil {
- return calendars, fmt.Errorf("while unmarshaling for %s: %w", v, err)
- }
- calendars[validity] = schedule
- }
- return calendars, nil
- }
- func unmarshalVehicles(timetableHome string) (Vehicles, error) {
- vehicles := Vehicles{}
- vehiclesFile, err := os.Open(filepath.Join(timetableHome, "vehicles.bare"))
- if err != nil {
- return vehicles, fmt.Errorf("while opening file: %w", err)
- }
- defer vehiclesFile.Close()
- for err == nil {
- vehicle := Vehicle{}
- err = bare.UnmarshalReader(vehiclesFile, &vehicle)
- if err != nil {
- if err == io.EOF {
- break
- } else {
- return vehicles, fmt.Errorf("while unmarshaling: %w", err)
- }
- }
- vehicles[vehicle.Id] = vehicle
- }
- return vehicles, nil
- }
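- // readVehicles loads vehicles.bare for every version of a feed, keyed by validity.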
- func readVehicles(feedHome string, versions []Version) (FeedVehicles, error) {
- vehicles := FeedVehicles{}
- for _, v := range versions {
- validity := Validity(v.String())
- timetableHome := filepath.Join(feedHome, string(validity))
- versionVehicles, err := unmarshalVehicles(timetableHome)
- if err != nil {
- return vehicles, fmt.Errorf("while unmarshaling for %s: %w", v, err)
- }
- vehicles[validity] = versionVehicles
- }
- return vehicles, nil
- }
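- // createPositionIndex builds an R-tree over stop positions for every version of
- // a feed; only the spatial fields are kept, the full stop is later re-read by
- // offset.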
- func createPositionIndex(feedHome string, versions []Version) (FeedPositionIndex, error) {
- feedPositionIndex := FeedPositionIndex{}
- for _, v := range versions {
- positionIndex := rtreego.NewTree(2, 25, 50)
- validity := Validity(v.String())
- timetableHome := filepath.Join(feedHome, string(validity))
- stopsFile, err := os.Open(filepath.Join(timetableHome, "stops.bare"))
- if err != nil {
- return feedPositionIndex, fmt.Errorf("while opening stops file: %w", err)
- }
- defer stopsFile.Close()
- for err == nil {
- stop := Stop{}
- err = bare.UnmarshalReader(stopsFile, &stop)
- if err != nil {
- if err == io.EOF {
- break
- } else {
- return feedPositionIndex, fmt.Errorf("while unmarshaling: %w", err)
- }
- }
- stop.Name = ""
- stop.NodeName = ""
- stop.ChangeOptions = nil
- stop.Zone = ""
- stop.Order = nil
- positionIndex.Insert(stop)
- }
- feedPositionIndex[validity] = positionIndex
- }
- return feedPositionIndex, nil
- }
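- // EnableFeeds fills traffic.Feeds with the registered feeds named in
- // cfg.EnabledFeeds, logging and skipping names that are not registered.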
- func EnableFeeds(cfg config.Config, traffic *Traffic) {
- feedsMap := RegisterFeeds()
- feeds := map[string]Feed{}
- for _, enabledFeed := range cfg.EnabledFeeds {
- if _, ok := feedsMap[enabledFeed]; !ok {
- log.Printf("feed %s not registered, ignoring\n", enabledFeed)
- continue
- }
- feeds[enabledFeed] = feedsMap[enabledFeed]
- }
- traffic.Feeds = feeds
- }
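- // Initialise waits on sigChan and (re)loads versions, indexes, calendars,
- // vehicles, and feed infos for all enabled feeds on every SIGUSR1; os.Interrupt
- // ends the loop. initedChan is signalled after the first load and doneChan when
- // the loop exits.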
- func Initialise(sigChan chan os.Signal, doneChan chan bool, initedChan chan bool, cfg config.Config,
- traffic *Traffic) {
- bare.MaxMapSize(8192)
- alreadyInitialised := false
- for {
- sig := <-sigChan
- if sig == os.Interrupt {
- break
- } // else it's SIGUSR1, reloading config
- allVersions := GlobalVersions{}
- codeIndexes := GlobalCodeIndex{}
- nameIndexes := GlobalNameIndex{}
- lineIndexes := GlobalNameIndex{}
- lineIdIndexes := GlobalCodeIndex{}
- tripIndexes := GlobalNameIndex{}
- calendars := GlobalCalendar{}
- vehicles := GlobalVehicles{}
- positionIndexes := GlobalPositionIndex{}
- feedInfos := map[Validity]map[string]FeedInfo{}
- for _, feed := range traffic.Feeds {
- feedID := feed.String()
- feedHome := filepath.Join(cfg.FeedsPath, feedID)
- err := file.UnpackTraffic(cfg.FeedsPath, feedID)
- if err != nil {
- log.Printf("while unpacking TRAFFIC in feed %s: %v\n", feed, err)
- continue
- }
- feedValidities, err := ListVersions(cfg, feed)
- if err != nil {
- log.Printf("while listing TRAFFIC versions in feed %s: %v\n", feed, err)
- continue
- }
- for _, version := range feedValidities {
- feedInfo, err := getFeedInfo(cfg.FeedsPath, feedID, Validity(version))
- if err != nil {
- log.Printf("while getting feed info for feed %s, version %s: %v\n", feed, version, err)
- continue
- }
- infoValidity := Validity(feedInfo.ValidSince + "_" + feedInfo.ValidTill)
- if feedInfos[infoValidity] == nil {
- feedInfos[infoValidity] = map[string]FeedInfo{}
- }
- feedInfos[infoValidity][feedID] = feedInfo
- }
- traffic.FeedInfos = feedInfos
- feedVersions, deletedValidities, err := CleanOldVersions(cfg, feed, traffic, feedID, feedValidities)
- if err != nil {
- log.Printf("while cleaning old TRAFFIC versions in feed %s: %v\n",
- feed, err)
- continue
- }
- for _, deletedVersion := range deletedValidities {
- delete(feedInfos, Validity(deletedVersion))
- }
- allVersions[feedID] = feedVersions
- codeIndexes[feedID], nameIndexes[feedID], lineIndexes[feedID], lineIdIndexes[feedID], tripIndexes[feedID],
- err = readIndexes(feedHome, feedVersions)
- if err != nil {
- log.Printf("while reading indexes in feed %s: %v\n", feed, err)
- continue
- }
- calendars[feedID], err = readCalendar(feedHome, feedVersions)
- if err != nil {
- log.Printf("while reading calendars in feed %s: %v\n", feed, err)
- continue
- }
- vehicles[feedID], err = readVehicles(feedHome, feedVersions)
- if err != nil {
- log.Printf("while reading vehicles in feed %s: %v\n", feed, err)
- continue
- }
- positionIndexes[feedID], err = createPositionIndex(feedHome, feedVersions)
- if err != nil {
- log.Printf("while creating position index in feed %s: %v\n", feed, err)
- continue
- }
- }
- traffic.CodeIndexes = codeIndexes
- traffic.NameIndexes = nameIndexes
- traffic.LineIndexes = lineIndexes
- traffic.LineIdIndexes = lineIdIndexes
- traffic.TripIndexes = tripIndexes
- traffic.Versions = allVersions
- traffic.Calendars = calendars
- traffic.Vehicles = vehicles
- traffic.PositionIndexes = positionIndexes
- traffic.FeedInfos = feedInfos
- log.Println("Initialised")
- if !alreadyInitialised {
- initedChan <- true
- alreadyInitialised = true
- }
- }
- doneChan <- true
- }
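- // GetTripFromStop returns the timed stops of the trip tripID, starting at
- // stopCode when it is given and covering the whole trip otherwise.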
- func GetTripFromStop(tripID string, stopCode string, context Context, traffic *Traffic) ([]TimedStopStub, error) {
- stubs := []TimedStopStub{}
- var (
- order = -1
- trip Trip
- err error
- baseTime uint = 0
- time uint = 0
- )
- file, err := openTrips(context)
- if err != nil {
- return stubs, fmt.Errorf("while opening trips: %w", err)
- }
- defer file.Close()
- if stopCode != "" {
- startingStop, err := GetStop(stopCode, context, traffic)
- if err != nil {
- return stubs, fmt.Errorf("while getting starting stop: %w", err)
- }
- // look the trip up in the starting stop's order map; a missing entry means the
- // trip does not pass through this stop
- o, ok := startingStop.Order[tripID]
- if !ok {
- return stubs, fmt.Errorf("trip for starting stop not found")
- }
- order = o.Sequence
- trip, err = GetTripByOffset(file, uint(o.TripOffset), context)
- if err != nil {
- return stubs, fmt.Errorf("while getting trip: %w", err)
- }
- } else {
- trip, err = GetTrip(file, tripID, context, traffic)
- if err != nil {
- return stubs, fmt.Errorf("while getting trip: %w", err)
- }
- }
- for _, departure := range trip.Departures {
- if departure.StopSequence >= order {
- stop, err := getStopByOffset(uint(departure.StopOffset), context, traffic)
- if err != nil {
- return stubs, fmt.Errorf("while getting stop: %w", err)
- }
- if baseTime == 0 {
- // times in the stubs are relative to the first returned stop
- baseTime = departure.Time
- }
- time = departure.Time - baseTime
- stubs = append(stubs, TimedStopStub{
- StopStub: StopStub{
- Code: stop.Code,
- Name: stop.Name,
- NodeName: stop.NodeName,
- Zone: stop.Zone,
- OnDemand: departure.Pickup == BY_DRIVER || departure.Dropoff == BY_DRIVER,
- },
- Time: time,
- })
- }
- }
- return stubs, nil
- }
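- // getStopByOffset reads a single Stop record from stops.bare at the given offset.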
- func getStopByOffset(offset uint, context Context, traffic *Traffic) (Stop, error) { // todo offset should be uint64 everywhere
- result := _Result{
- Filename: "stops.bare",
- Offset: offset,
- TimetableHome: filepath.Join(context.DataHome, context.FeedID, string(context.Version)),
- }
- r, e := gott.NewResult(result).
- Bind(openFile).
- Bind(seek).
- Bind(unmarshalStop).
- Finish()
- if e != nil {
- return Stop{}, e
- } else {
- return r.(_Result).Stop, nil
- }
- }
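- // getLineByOffset reads a single Line record from lines.bare at the given offset.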
- func getLineByOffset(offset uint, dataHome string, feedName string,
- versionCode Validity) (Line, error) {
- result := _Result{
- Filename: "lines.bare",
- Offset: offset,
- TimetableHome: filepath.Join(dataHome, feedName, string(versionCode)),
- }
- r, e := gott.NewResult(result).
- Bind(openFile).
- Bind(seek).
- Bind(unmarshalLine).
- Finish()
- if e != nil {
- return Line{}, e
- } else {
- return r.(_Result).Line, nil
- }
- }
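- // getFeedInfo reads feed_info.bare of the given feed version.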
- func getFeedInfo(dataHome string, feedName string, versionCode Validity) (FeedInfo, error) {
- result := _Result{
- Filename: "feed_info.bare",
- TimetableHome: filepath.Join(dataHome, feedName, string(versionCode)),
- }
- r, e := gott.NewResult(result).
- Bind(openFile).
- Bind(unmarshalFeedInfo).
- Finish()
- if e != nil {
- return FeedInfo{}, e
- } else {
- return r.(_Result).FeedInfo, nil
- }
- }
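- // GetStop looks up a stop by its code via the code index and reads it from disk.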
- func GetStop(stopCode string, context Context, traffic *Traffic) (Stop, error) {
- codeIndex := traffic.CodeIndexes[context.FeedID][context.Version]
- return getStopByOffset(codeIndex[stopCode], context, traffic)
- }
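- // GetStopStub returns a StopStub for stopCode on the line lineID, marking the
- // stop as on-demand when its departure is picked up or dropped off BY_DRIVER.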
- func GetStopStub(stopCode string, lineID string, context Context, traffic *Traffic) (StopStub, error) {
- stop, err := GetStop(stopCode, context, traffic)
- if err != nil {
- return StopStub{}, err
- }
- var trip Trip
- var stopOrder = -1
- file, err := openTrips(context)
- if err != nil {
- return StopStub{}, fmt.Errorf("while opening trips: %w", err)
- }
- defer file.Close()
- for _, order := range stop.Order {
- offset := order.TripOffset
- trip, _ = GetTripByOffset(file, offset, context)
- if trip.LineID == lineID {
- stopOrder = order.Sequence
- break
- }
- }
- if stopOrder == -1 {
- return StopStub{}, fmt.Errorf("cannot the stop on given line")
- }
- ix, ok := slices.BinarySearchFunc(trip.Departures, stopOrder, func(d Departure, stopOrder int) int {
- return d.StopSequence - stopOrder
- })
- if !ok {
- return StopStub{}, fmt.Errorf("cannot find departure at sequence %d", stopOrder)
- }
- stopStub := StopStub{
- Code: stop.Code,
- Name: stop.Name,
- NodeName: stop.NodeName,
- Zone: stop.Zone,
- OnDemand: trip.Departures[ix].Pickup == BY_DRIVER || trip.Departures[ix].Dropoff == BY_DRIVER,
- }
- return stopStub, nil
- }
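- // GetLine looks up a line by its id via the line id index.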
- func GetLine(id string, context Context, traffic *Traffic) (Line, error) {
- index := traffic.LineIdIndexes[context.FeedID][context.Version]
- return getLineByOffset(index[id], context.DataHome, context.FeedID, context.Version)
- }
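- // GetLineOld looks up a line by its cleaned name via the line name index; it
- // returns an empty Line when no name matches.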
- func GetLineOld(name string, context Context, traffic *Traffic) (Line, error) {
- index := traffic.LineIndexes[context.FeedID][context.Version]
- for _, o := range index {
- cleanedName, err := CleanQuery(name, traffic.Feeds[context.FeedID])
- if err != nil {
- return Line{}, err
- }
- if o.Name == cleanedName {
- return getLineByOffset(o.Offsets[0], context.DataHome, context.FeedID, context.Version)
- }
- }
- return Line{}, nil
- }
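- // QueryLines fuzzy-matches the cleaned query against the line name index and
- // returns the matching lines, de-duplicated by id.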
- func QueryLines(query string, dataHome string, feedName string,
- versionCode Validity, traffic *Traffic) ([]Line, error) {
- linesSet := map[string]Line{}
- index := traffic.LineIndexes[feedName][versionCode]
- cleanQuery, err := CleanQuery(query, traffic.Feeds[feedName])
- if err != nil {
- return []Line{}, fmt.Errorf("while cleaning query: %w", err)
- }
- results := fuzzy.FindFrom(cleanQuery, index)
- for _, result := range results {
- for _, offset := range index[result.Index].Offsets {
- line, err := getLineByOffset(offset, dataHome, feedName, versionCode)
- if err != nil {
- return []Line{}, fmt.Errorf("while getting line for %s: %w", result.Str, err)
- }
- linesSet[line.Id] = line
- }
- }
- lines := make([]Line, len(linesSet))
- i := 0
- for _, line := range linesSet {
- lines[i] = line
- i++
- }
- return lines, nil
- }
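- // QueryStops fuzzy-matches the query against the stop name index and returns
- // the matching stops, de-duplicated by id.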
- func QueryStops(query string, context Context, traffic *Traffic) ([]Stop, error) {
- stopsSet := map[string]Stop{}
- nameIndex := traffic.NameIndexes[context.FeedID][context.Version]
- results := fuzzy.FindFrom(query, nameIndex)
- for _, result := range results {
- for _, offset := range nameIndex[result.Index].Offsets {
- stop, err := getStopByOffset(offset, context, traffic)
- if err != nil {
- return []Stop{}, err
- }
- stopsSet[stop.Id] = stop
- }
- }
- stops := make([]Stop, len(stopsSet))
- i := 0
- for _, stop := range stopsSet {
- stops[i] = stop
- i++
- }
- return stops, nil
- }
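- // GetStopsNear returns the twelve stops nearest to the given position.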
- func GetStopsNear(location Position, context Context, traffic *Traffic) ([]Stop, error) {
- stops := []Stop{}
- positionIndex := traffic.PositionIndexes[context.FeedID][context.Version]
- codeIndex := traffic.CodeIndexes[context.FeedID][context.Version]
- spatials := positionIndex.NearestNeighbors(12, rtreego.Point{location.Lat, location.Lon})
- for _, spatial := range spatials {
- stop, err := getStopByOffset(codeIndex[spatial.(Stop).Code], context, traffic)
- if err != nil {
- return stops, fmt.Errorf("while getting stop by offset for %s: %w", spatial.(Stop).Code, err)
- }
- stops = append(stops, stop)
- }
- return stops, nil
- }
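- // GetLanguage returns the language declared in the feed info of ctx's version.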
- func GetLanguage(ctx Context) (string, error) {
- feedInfo, err := getFeedInfo(ctx.DataHome, ctx.FeedID, ctx.Version)
- return feedInfo.Language, err
- }
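- // CleanOldVersions deletes timetable versions that are no longer valid from disk
- // and returns the remaining valid versions together with the deleted validities.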
- func CleanOldVersions(cfg config.Config, feed Feed, t *Traffic, feedID string, allValidities []string) ([]Version, []string, error) {
- feedVersions := []Version{}
- deletedValidities := []string{}
- timezone, err := GetTimezone(Stop{}, t, feedID)
- if err != nil {
- return feedVersions, deletedValidities, fmt.Errorf("while getting timezone: %w", err)
- }
- now := time.Now().In(timezone)
- versionsMap := map[string]Version{}
- allVersions := []Version{}
- for _, validity := range allValidities {
- version, err := MakeVersionTimezone(validity, timezone)
- if err != nil {
- return feedVersions, deletedValidities, fmt.Errorf("while making version of %s: %w", version, err)
- }
- allVersions = append(allVersions, version)
- versionsMap[validity] = version
- }
- validVersions := FindValidVersions(allVersions, now)
- validVersionsMap := map[string]bool{}
- for _, version := range validVersions {
- validVersionsMap[version.String()] = true
- }
- err = file.CleanOldVersions(FeedPath(cfg, feed), validVersionsMap)
- if err != nil {
- return feedVersions, deletedValidities, fmt.Errorf("while removing files: %w", err)
- }
- feedVersions = append(feedVersions, validVersions...)
- for _, version := range allVersions {
- if _, ok := validVersionsMap[version.String()]; !ok {
- deletedValidities = append(deletedValidities, version.String())
- }
- }
- return feedVersions, deletedValidities, nil
- }
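- // createSmallerRect returns a square of the given side length centred on rect,
- // together with its lower-left and upper-right corners.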
- func createSmallerRect(side float64, rect *rtreego.Rect) (*rtreego.Rect, Position, Position, error) {
- halfSide := side / 2
- latMid := rect.PointCoord(0) + (rect.LengthsCoord(0) / 2)
- lonMid := rect.PointCoord(1) + (rect.LengthsCoord(1) / 2)
- lb := Position{Lat: latMid - halfSide, Lon: lonMid - halfSide}
- rt := Position{Lat: latMid + halfSide, Lon: lonMid + halfSide}
- rect, err := rtreego.NewRectFromPoints(rtreego.Point{lb.Lat, lb.Lon}, rtreego.Point{rt.Lat, rt.Lon})
- return rect, lb, rt, err
- }
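- // GetStopsIn returns the stops inside the rectangle spanned by lb and rt; a
- // rectangle larger than the limit is shrunk around its centre first.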
- func GetStopsIn(lb, rt Position, context Context, traffic *Traffic) ([]Stop, error) {
- limit := 0.0005
- side := 0.0224 // sqrt(0.0005)
- stops := []Stop{}
- // TODO does it take into account rect 179 -> -179 latitude?
- rect, err := rtreego.NewRectFromPoints(rtreego.Point{lb.Lat, lb.Lon}, rtreego.Point{rt.Lat, rt.Lon})
- if err != nil {
- return stops, fmt.Errorf("while creating a rect: %w", err)
- }
- if rect.Size() > limit {
- rect, _, _, err = createSmallerRect(side, rect)
- if err != nil {
- return stops, fmt.Errorf("while creating the smaller rect: %w", err)
- }
- }
- positionIndex := traffic.PositionIndexes[context.FeedID][context.Version]
- codeIndex := traffic.CodeIndexes[context.FeedID][context.Version]
- spatials := positionIndex.SearchIntersect(rect)
- for _, spatial := range spatials {
- stop, err := getStopByOffset(codeIndex[spatial.(Stop).Code], context, traffic)
- if err != nil {
- return stops, fmt.Errorf("while getting stop by offset for %s: %w", spatial.(Stop).Code, err)
- }
- stops = append(stops, stop)
- }
- return stops, nil
- }
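- // GetVehiclesIn returns realtime vehicle positions inside the rectangle spanned
- // by lb and rt; a rectangle larger than the limit is shrunk around its centre
- // first.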
- func GetVehiclesIn(lb, rt Position, context Context, t *Traffic) ([]VehicleStatus, error) {
- limit := 0.0005
- side := 0.0224 // sqrt(0.0005)
- vehicles := []VehicleStatus{}
- rect, err := rtreego.NewRectFromPoints(rtreego.Point{lb.Lat, lb.Lon}, rtreego.Point{rt.Lat, rt.Lon})
- if err != nil {
- return vehicles, fmt.Errorf("while creating a rect: %w", err)
- }
- if rect.Size() > limit {
- rect, lb, rt, err = createSmallerRect(side, rect)
- if err != nil {
- return vehicles, fmt.Errorf("while creating the smaller rect: %w", err)
- }
- }
- vehiclesRt := getVehiclePositions(context, t, lb, rt)
- for _, vehicleRt := range vehiclesRt {
- if rt.Lon < float64(vehicleRt.Longitude) || lb.Lon > float64(vehicleRt.Longitude) {
- continue
- }
- lat := float64(vehicleRt.Latitude)
- if lb.Lat < rt.Lat {
- if lb.Lat < lat && lat < rt.Lat {
- vehicles = append(vehicles, vehicleRt)
- }
- } else {
- if lat > lb.Lat || lat < rt.Lat {
- vehicles = append(vehicles, vehicleRt)
- }
- }
- }
- return vehicles, nil
- }