diff --git a/apps/api/.env.example b/apps/api/.env.example index 25241b7..32467aa 100644 --- a/apps/api/.env.example +++ b/apps/api/.env.example @@ -1 +1,2 @@ PORT=8080 +METRA_API_KEY= diff --git a/apps/api/cmd/api/main.go b/apps/api/cmd/api/main.go deleted file mode 100644 index 8a6caf9..0000000 --- a/apps/api/cmd/api/main.go +++ /dev/null @@ -1,25 +0,0 @@ -package main - -import ( - "log" - "net/http" - "os" -) - -func main() { - port := os.Getenv("PORT") - if port == "" { - port = "8080" - } - - mux := http.NewServeMux() - mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write([]byte("ok")) - }) - - log.Printf("starting server on :%s", port) - if err := http.ListenAndServe(":"+port, mux); err != nil { - log.Fatal(err) - } -} diff --git a/apps/api/go.mod b/apps/api/go.mod index 58aa296..08b25c0 100644 --- a/apps/api/go.mod +++ b/apps/api/go.mod @@ -1,3 +1,12 @@ -module github.com/rnielsen/tracky/api +module github.com/Tracky-Trains/tracky/api go 1.26 + +require ( + github.com/MobilityData/gtfs-realtime-bindings/golang/gtfs v1.0.0 + google.golang.org/protobuf v1.36.11 +) + +require golang.org/x/crypto v0.49.0 + +require github.com/joho/godotenv v1.5.1 diff --git a/apps/api/go.sum b/apps/api/go.sum new file mode 100644 index 0000000..fcb0050 --- /dev/null +++ b/apps/api/go.sum @@ -0,0 +1,10 @@ +github.com/MobilityData/gtfs-realtime-bindings/golang/gtfs v1.0.0 h1:f4P+fVYmSIWj4b/jvbMdmrmsx/Xb+5xCpYYtVXOdKoc= +github.com/MobilityData/gtfs-realtime-bindings/golang/gtfs v1.0.0/go.mod h1:nSmbVVQSM4lp9gYvVaaTotnRxSwZXEdFnJARofg5V4g= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +golang.org/x/crypto v0.49.0 
h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= +golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= diff --git a/apps/api/gtfs/realtime.go b/apps/api/gtfs/realtime.go new file mode 100644 index 0000000..9c26bdf --- /dev/null +++ b/apps/api/gtfs/realtime.go @@ -0,0 +1,203 @@ +package gtfs + +import ( + "context" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "time" + + gtfsrt "github.com/MobilityData/gtfs-realtime-bindings/golang/gtfs" + "google.golang.org/protobuf/proto" + + "github.com/Tracky-Trains/tracky/api/spec" +) + +// FetchAndParsePositions downloads a GTFS-RT vehicle positions feed and returns +// parsed TrainPositions stamped with providerID. +func FetchAndParsePositions( + ctx context.Context, + url string, + providerID string, + apiKey string, +) ([]spec.TrainPosition, error) { + feed, err := fetchFeed(ctx, url, apiKey) + if err != nil { + return nil, fmt.Errorf("gtfs-rt positions: %w", err) + } + + now := time.Now() + var positions []spec.TrainPosition + + if b, err := json.MarshalIndent(feed.Entity, "", " "); err == nil { + filename := fmt.Sprintf("debug_%s_positions.json", providerID) + if err := os.WriteFile(filename, b, 0644); err != nil { + log.Printf("[DEBUG] failed to write %s: %v", filename, err) + } else { + log.Printf("[DEBUG] %s realtime positions (%d) saved to %s", providerID, len(feed.Entity), filename) + } + } + + for _, entity := range feed.Entity { + vp := entity.Vehicle + if vp == nil || vp.Trip == nil { + continue + } + + pos := spec.TrainPosition{ + Provider: providerID, + LastUpdated: now, + } + + if trip := vp.Trip; trip != nil { + pos.TripID = providerID + ":" + trip.GetTripId() + pos.RouteID = providerID + ":" + trip.GetRouteId() + pos.RunDate = parseStartDate(trip.GetStartDate()) + } + + if vehicle := 
vp.Vehicle; vehicle != nil { + pos.TrainNumber = vehicle.GetLabel() + pos.VehicleID = vehicle.GetId() + } + + if p := vp.Position; p != nil { + lat := float64(p.GetLatitude()) + lon := float64(p.GetLongitude()) + pos.Lat = &lat + pos.Lon = &lon + + if p.Bearing != nil { + h := fmt.Sprintf("%.0f", float64(p.GetBearing())) + pos.Heading = &h + } + + if p.Speed != nil { + // GTFS-RT speed is m/s — convert to mph + mph := float64(p.GetSpeed()) * 2.23694 + pos.SpeedMPH = &mph + } + } + + if vp.StopId != nil { + stopID := providerID + ":" + vp.GetStopId() + pos.CurrentStopCode = &stopID + } + + if vp.Timestamp != nil { + pos.LastUpdated = time.Unix(int64(vp.GetTimestamp()), 0) + } + + positions = append(positions, pos) + } + + return positions, nil +} + +// FetchAndParseTripUpdates downloads a GTFS-RT trip updates feed and returns +// parsed TrainStopTimes stamped with providerID. +func FetchAndParseTripUpdates( + ctx context.Context, + url string, + providerID string, + apiKey string, +) ([]spec.TrainStopTime, error) { + feed, err := fetchFeed(ctx, url, apiKey) + if err != nil { + return nil, fmt.Errorf("gtfs-rt trip updates: %w", err) + } + + now := time.Now() + var stopTimes []spec.TrainStopTime + + if b, err := json.MarshalIndent(feed.Entity, "", " "); err == nil { + filename := fmt.Sprintf("debug_%s_stoptimes.json", providerID) + if err := os.WriteFile(filename, b, 0644); err != nil { + log.Printf("[DEBUG] failed to write %s: %v", filename, err) + } else { + log.Printf("[DEBUG] %s trip updates (%d entities) saved to %s", providerID, len(feed.Entity), filename) + } + } + + for _, entity := range feed.Entity { + tu := entity.TripUpdate + if tu == nil { + continue + } + + tripID := "" + runDate := time.Time{} + + if trip := tu.Trip; trip != nil { + tripID = providerID + ":" + trip.GetTripId() + runDate = parseStartDate(trip.GetStartDate()) + } + + for _, stu := range tu.StopTimeUpdate { + st := spec.TrainStopTime{ + Provider: providerID, + TripID: tripID, + RunDate: 
runDate, + StopCode: providerID + ":" + stu.GetStopId(), + StopSequence: int(stu.GetStopSequence()), + LastUpdated: now, + } + + if arr := stu.Arrival; arr != nil && arr.Time != nil { + t := time.Unix(arr.GetTime(), 0) + st.EstimatedArr = &t + } + + if dep := stu.Departure; dep != nil && dep.Time != nil { + t := time.Unix(dep.GetTime(), 0) + st.EstimatedDep = &t + } + + stopTimes = append(stopTimes, st) + } + } + + return stopTimes, nil +} + +// fetchFeed downloads and unmarshals a GTFS-RT protobuf feed. +func fetchFeed(ctx context.Context, url string, apiKey string) (*gtfsrt.FeedMessage, error) { + data, err := fetchBytes(ctx, url, apiKey) + if err != nil { + return nil, fmt.Errorf("fetch %s: %w", url, err) + } + var feed gtfsrt.FeedMessage + if err := proto.Unmarshal(data, &feed); err != nil { + return nil, fmt.Errorf("fetch %s: unmarshal: %w", url, err) + } + return &feed, nil +} + +// fetchBytes performs a GET request and returns the response body. +// If apiKey is non-empty, it's sent as a Bearer token in the Authorization header. +func fetchBytes(ctx context.Context, url string, apiKey string) ([]byte, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, err + } + if apiKey != "" { + req.Header.Set("Authorization", "Bearer "+apiKey) + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("HTTP %d", resp.StatusCode) + } + return io.ReadAll(resp.Body) +} + +// parseStartDate parses a GTFS-RT start_date string (YYYYMMDD) into a time.Time. 
+func parseStartDate(s string) time.Time { + t, _ := time.Parse("20060102", s) + return t +} diff --git a/apps/api/gtfs/static.go b/apps/api/gtfs/static.go new file mode 100644 index 0000000..352c5f8 --- /dev/null +++ b/apps/api/gtfs/static.go @@ -0,0 +1,348 @@ +package gtfs + +import ( + "archive/zip" + "bytes" + "context" + "encoding/csv" + "fmt" + "io" + "net/http" + "strconv" + "time" + + "github.com/Tracky-Trains/tracky/api/spec" +) + +// FetchAndParseStatic downloads a GTFS zip from url, parses it, and returns +// slices of spec types stamped with agencyID. +func FetchAndParseStatic( + ctx context.Context, + url string, + agencyID string, +) ( + agencies []spec.Agency, + routes []spec.Route, + stops []spec.Stop, + trips []spec.Trip, + stopTimes []spec.ScheduledStopTime, + calendars []spec.ServiceCalendar, + exceptions []spec.ServiceException, + err error, +) { + data, err := fetchStaticBytes(ctx, url) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, fmt.Errorf("gtfs: fetch %s: %w", url, err) + } + + zr, err := zip.NewReader(bytes.NewReader(data), int64(len(data))) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, fmt.Errorf("gtfs: open zip: %w", err) + } + + files := indexZip(zr) + + if f, ok := files["agency.txt"]; ok { + agencies, err = parseAgency(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + if f, ok := files["routes.txt"]; ok { + routes, err = parseRoutes(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + if f, ok := files["stops.txt"]; ok { + stops, err = parseStops(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + if f, ok := files["trips.txt"]; ok { + trips, err = parseTrips(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + if f, ok := files["stop_times.txt"]; ok { + stopTimes, err = parseStopTimes(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, 
nil, nil, err + } + } + + // calendar.txt is optional — some feeds use only calendar_dates.txt + if f, ok := files["calendar.txt"]; ok { + calendars, err = parseCalendar(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + if f, ok := files["calendar_dates.txt"]; ok { + exceptions, err = parseCalendarDates(f, agencyID) + if err != nil { + return nil, nil, nil, nil, nil, nil, nil, err + } + } + + return +} + +// fetchStaticBytes performs a GET request and returns the response body. +func fetchStaticBytes(ctx context.Context, url string) ([]byte, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, err + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("HTTP %d", resp.StatusCode) + } + return io.ReadAll(resp.Body) +} + +// indexZip returns a map of filename → *zip.File for the archive. +func indexZip(zr *zip.Reader) map[string]*zip.File { + m := make(map[string]*zip.File, len(zr.File)) + for _, f := range zr.File { + m[f.Name] = f + } + return m +} + +// readCSV opens a zip file and returns all rows as a slice of header→value maps. 
+func readCSV(f *zip.File) ([]map[string]string, error) { + rc, err := f.Open() + if err != nil { + return nil, fmt.Errorf("gtfs: open %s: %w", f.Name, err) + } + defer rc.Close() + + r := csv.NewReader(rc) + r.TrimLeadingSpace = true + + header, err := r.Read() + if err != nil { + return nil, fmt.Errorf("gtfs: read header %s: %w", f.Name, err) + } + // Trim BOM from first field if present + if len(header) > 0 { + header[0] = trimBOM(header[0]) + } + + var rows []map[string]string + for { + rec, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("gtfs: read %s: %w", f.Name, err) + } + row := make(map[string]string, len(header)) + for i, col := range header { + if i < len(rec) { + row[col] = rec[i] + } + } + rows = append(rows, row) + } + return rows, nil +} + +func trimBOM(s string) string { + if len(s) >= 3 && s[0] == 0xEF && s[1] == 0xBB && s[2] == 0xBF { + return s[3:] + } + return s +} + +func optStr(m map[string]string, key string) *string { + v, ok := m[key] + if !ok || v == "" { + return nil + } + return &v +} + +func optBool(m map[string]string, key string) *bool { + v, ok := m[key] + if !ok || v == "" { + return nil + } + b := v == "1" + return &b +} + +func optInt(m map[string]string, key string) *int { + v, ok := m[key] + if !ok || v == "" { + return nil + } + i, err := strconv.Atoi(v) + if err != nil { + return nil + } + return &i +} + +func parseAgency(f *zip.File, agencyID string) ([]spec.Agency, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.Agency, 0, len(rows)) + for _, r := range rows { + out = append(out, spec.Agency{ + AgencyID: agencyID, + Name: r["agency_name"], + URL: r["agency_url"], + Timezone: r["agency_timezone"], + Lang: optStr(r, "agency_lang"), + Phone: optStr(r, "agency_phone"), + }) + } + return out, nil +} + +func parseRoutes(f *zip.File, agencyID string) ([]spec.Route, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } 
+ out := make([]spec.Route, 0, len(rows)) + for _, r := range rows { + out = append(out, spec.Route{ + AgencyID: agencyID, + RouteID: agencyID + ":" + r["route_id"], + ShortName: r["route_short_name"], + LongName: r["route_long_name"], + Color: r["route_color"], + TextColor: r["route_text_color"], + ShapeID: optStr(r, "shape_id"), + }) + } + return out, nil +} + +func parseStops(f *zip.File, agencyID string) ([]spec.Stop, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.Stop, 0, len(rows)) + for _, r := range rows { + lat, _ := strconv.ParseFloat(r["stop_lat"], 64) + lon, _ := strconv.ParseFloat(r["stop_lon"], 64) + out = append(out, spec.Stop{ + AgencyID: agencyID, + StopID: agencyID + ":" + r["stop_id"], + Code: r["stop_code"], + Name: r["stop_name"], + Lat: lat, + Lon: lon, + Timezone: optStr(r, "stop_timezone"), + WheelchairBoarding: optBool(r, "wheelchair_boarding"), + }) + } + return out, nil +} + +func parseTrips(f *zip.File, agencyID string) ([]spec.Trip, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.Trip, 0, len(rows)) + for _, r := range rows { + out = append(out, spec.Trip{ + AgencyID: agencyID, + TripID: agencyID + ":" + r["trip_id"], + RouteID: agencyID + ":" + r["route_id"], + ServiceID: r["service_id"], + Headsign: r["trip_headsign"], + ShapeID: optStr(r, "shape_id"), + DirectionID: optInt(r, "direction_id"), + }) + } + return out, nil +} + +func parseStopTimes(f *zip.File, agencyID string) ([]spec.ScheduledStopTime, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.ScheduledStopTime, 0, len(rows)) + for _, r := range rows { + seq, _ := strconv.Atoi(r["stop_sequence"]) + out = append(out, spec.ScheduledStopTime{ + AgencyID: agencyID, + TripID: agencyID + ":" + r["trip_id"], + StopID: agencyID + ":" + r["stop_id"], + StopSequence: seq, + ArrivalTime: optStr(r, "arrival_time"), + DepartureTime: optStr(r, 
"departure_time"), + Timepoint: optBool(r, "timepoint"), + DropOffType: optInt(r, "drop_off_type"), + PickupType: optInt(r, "pickup_type"), + }) + } + return out, nil +} + +func parseCalendar(f *zip.File, agencyID string) ([]spec.ServiceCalendar, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.ServiceCalendar, 0, len(rows)) + for _, r := range rows { + start, _ := time.Parse("20060102", r["start_date"]) + end, _ := time.Parse("20060102", r["end_date"]) + out = append(out, spec.ServiceCalendar{ + AgencyID: agencyID, + ServiceID: r["service_id"], + Monday: r["monday"] == "1", + Tuesday: r["tuesday"] == "1", + Wednesday: r["wednesday"] == "1", + Thursday: r["thursday"] == "1", + Friday: r["friday"] == "1", + Saturday: r["saturday"] == "1", + Sunday: r["sunday"] == "1", + StartDate: start, + EndDate: end, + }) + } + return out, nil +} + +func parseCalendarDates(f *zip.File, agencyID string) ([]spec.ServiceException, error) { + rows, err := readCSV(f) + if err != nil { + return nil, err + } + out := make([]spec.ServiceException, 0, len(rows)) + for _, r := range rows { + date, _ := time.Parse("20060102", r["date"]) + exType, _ := strconv.Atoi(r["exception_type"]) + out = append(out, spec.ServiceException{ + AgencyID: agencyID, + ServiceID: r["service_id"], + Date: date, + ExceptionType: exType, + }) + } + return out, nil +} diff --git a/apps/api/main.go b/apps/api/main.go new file mode 100644 index 0000000..b1af9db --- /dev/null +++ b/apps/api/main.go @@ -0,0 +1,44 @@ +package main + +import ( + "log" + "net/http" + "os" + + _ "github.com/joho/godotenv/autoload" + + "github.com/Tracky-Trains/tracky/api/providers" + "github.com/Tracky-Trains/tracky/api/providers/amtrak" + "github.com/Tracky-Trains/tracky/api/providers/brightline" + "github.com/Tracky-Trains/tracky/api/providers/metra" + "github.com/Tracky-Trains/tracky/api/providers/metrotransit" + "github.com/Tracky-Trains/tracky/api/providers/trirail" + 
"github.com/Tracky-Trains/tracky/api/routes" +) + +func main() { + port := os.Getenv("PORT") + if port == "" { + port = "8080" + } + + registry := providers.NewRegistry() + registry.Register(amtrak.New()) + registry.Register(brightline.New()) + registry.Register(metra.New()) + registry.Register(metrotransit.New()) + registry.Register(trirail.New()) + + mux := http.NewServeMux() + mux.HandleFunc("GET /health", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("ok")) + }) + + routes.Setup(mux, registry) + + log.Printf("starting server on :%s", port) + if err := http.ListenAndServe(":"+port, mux); err != nil { + log.Fatal(err) + } +} diff --git a/apps/api/providers/amtrak/amtrak.go b/apps/api/providers/amtrak/amtrak.go new file mode 100644 index 0000000..dc6c8d3 --- /dev/null +++ b/apps/api/providers/amtrak/amtrak.go @@ -0,0 +1,281 @@ +package amtrak + +import ( + "context" + "crypto/aes" + "crypto/cipher" + "crypto/sha1" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "strconv" + "strings" + "time" + + "golang.org/x/crypto/pbkdf2" + + "github.com/Tracky-Trains/tracky/api/providers" + "github.com/Tracky-Trains/tracky/api/providers/base" + "github.com/Tracky-Trains/tracky/api/spec" +) + +const ( + staticURL = "https://content.amtrak.com/content/gtfs/GTFS.zip" + realtimeURL = "https://maps.amtrak.com/services/MapDataService/trains/getTrainsData" + + saltHex = "9a3686ac" + ivHex = "c6eb2f7f5c4740c1a2f708fefd947d39" + publicKey = "69af143c-e8cf-47f8-bf09-fc1f61e5cc33" + masterSegment = 88 // chars at the end of the payload holding the encrypted private key +) + +// amtrakTZ maps single-letter Amtrak timezone codes to IANA names. +var amtrakTZ = map[string]string{ + "P": "America/Los_Angeles", + "M": "America/Denver", + "C": "America/Chicago", + "E": "America/New_York", +} + +// Provider wraps the base provider and overrides realtime fetching. 
+// Amtrak does not publish a standard GTFS-RT feed; realtime data comes +// from their encrypted map API. +type Provider struct { + base *base.Provider +} + +// New returns an Amtrak provider. +func New() *Provider { + return &Provider{ + base: base.New(base.Config{ + ProviderID: "amtrak", + Name: "Amtrak", + StaticURL: staticURL, + // PositionsURL / TripUpdatesURL intentionally empty — FetchRealtime is overridden below + }), + } +} + +// ID returns "amtrak". +func (p *Provider) ID() string { + return p.base.ID() +} + +// FetchStatic delegates to the base provider — Amtrak's GTFS zip is standard. +func (p *Provider) FetchStatic(ctx context.Context) (*providers.StaticFeed, error) { + return p.base.FetchStatic(ctx) +} + +// FetchRealtime fetches and decrypts the Amtrak train status API, mapping +// the response to TrainPositions and TrainStopTimes. +func (p *Provider) FetchRealtime(ctx context.Context) (*providers.RealtimeFeed, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, realtimeURL, nil) + if err != nil { + return nil, fmt.Errorf("amtrak: build request: %w", err) + } + req.Header.Set("User-Agent", "tracky") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, fmt.Errorf("amtrak: fetch: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("amtrak: unexpected status %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("amtrak: read body: %w", err) + } + + raw := string(body) + if len(raw) <= masterSegment { + return nil, fmt.Errorf("amtrak: payload too short (%d bytes)", len(raw)) + } + + plaintext, err := getDecryptedData(raw) + if err != nil { + return nil, fmt.Errorf("amtrak: decrypt: %w", err) + } + + if err := os.WriteFile("debug_amtrak.json", plaintext, 0644); err != nil { + log.Printf("[DEBUG] failed to write debug_amtrak.json: %v", err) + } else { + log.Printf("[DEBUG] amtrak realtime data saved to 
debug_amtrak.json") + } + + var data trainData + if err := json.Unmarshal(plaintext, &data); err != nil { + return nil, fmt.Errorf("amtrak: parse json: %w", err) + } + + var positions []spec.TrainPosition + var stopTimes []spec.TrainStopTime + now := time.Now() + + for _, f := range data.Features { + if len(f.Geometry.Coordinates) < 2 { + continue + } + props := f.Properties + tripID := "amtrak:" + props.TrainNum + runDate := now.UTC().Truncate(24 * time.Hour) + + // --- TrainPosition --- + lon := f.Geometry.Coordinates[0] + lat := f.Geometry.Coordinates[1] + + pos := spec.TrainPosition{ + Provider: "amtrak", + TripID: tripID, + RunDate: runDate, + TrainNumber: props.TrainNum, + RouteID: props.RouteName, + Lat: &lat, + Lon: &lon, + LastUpdated: now, + } + + if props.Heading != "" { + h := props.Heading + pos.Heading = &h + } + + if mph, err := strconv.ParseFloat(props.Velocity, 64); err == nil { + pos.SpeedMPH = &mph + } + + if props.EventCode != "" { + code := "amtrak:" + props.EventCode + pos.CurrentStopCode = &code + } + + if t := parseAmtrakTime(props.UpdatedAt, ""); t != nil { + pos.LastUpdated = *t + } + + positions = append(positions, pos) + + // --- TrainStopTimes from StationN entries --- + for _, station := range props.Stations { + st := spec.TrainStopTime{ + Provider: "amtrak", + TripID: tripID, + RunDate: runDate, + StopCode: "amtrak:" + station.Code, + StopSequence: station.Sequence, + LastUpdated: now, + } + + st.ScheduledArr = parseAmtrakTime(station.SchArr, station.Tz) + st.ScheduledDep = parseAmtrakTime(station.SchDep, station.Tz) + + if station.PostArr != "" || station.PostDep != "" { + // Train has passed this stop — post times are actuals. + st.ActualArr = parseAmtrakTime(station.PostArr, station.Tz) + st.ActualDep = parseAmtrakTime(station.PostDep, station.Tz) + } else { + // Upcoming stop — est times are live estimates. 
+ st.EstimatedArr = parseAmtrakTime(station.EstArr, station.Tz) + st.EstimatedDep = parseAmtrakTime(station.EstDep, station.Tz) + } + + stopTimes = append(stopTimes, st) + } + } + + return &providers.RealtimeFeed{ + Positions: positions, + StopTimes: stopTimes, + }, nil +} + +// parseAmtrakTime parses an Amtrak datetime string in the format "01/02/2006 15:04:05". +// tz is a single-letter Amtrak timezone code; if empty or unknown, UTC is used. +// Returns nil on empty input or parse failure. +func parseAmtrakTime(s, tz string) *time.Time { + if s == "" { + return nil + } + loc := time.UTC + if ianaName, ok := amtrakTZ[tz]; ok { + if l, err := time.LoadLocation(ianaName); err == nil { + loc = l + } + } + t, err := time.ParseInLocation("01/02/2006 15:04:05", s, loc) + if err != nil { + return nil + } + return &t +} + +// getDecryptedData implements the two-pass decryption scheme: +// 1. The last masterSegment chars are an AES-encrypted private key, decrypted with publicKey. +// 2. The remainder is the encrypted train data, decrypted with the recovered private key. +func getDecryptedData(raw string) ([]byte, error) { + mainContent := raw[:len(raw)-masterSegment] + encryptedPrivateKey := raw[len(raw)-masterSegment:] + + privateKeyBytes, err := decrypt(encryptedPrivateKey, publicKey) + if err != nil { + return nil, fmt.Errorf("decrypting private key: %w", err) + } + privateKey := strings.SplitN(string(privateKeyBytes), "|", 2)[0] + + plaintext, err := decrypt(mainContent, privateKey) + if err != nil { + return nil, fmt.Errorf("decrypting train data: %w", err) + } + return plaintext, nil +} + +// decrypt decrypts a base64-encoded AES-128-CBC ciphertext using a key derived +// via PBKDF2-SHA1 (1000 iterations, 16-byte output) with the fixed salt and IV. 
+func decrypt(content, key string) ([]byte, error) { + salt, err := hex.DecodeString(saltHex) + if err != nil { + return nil, err + } + iv, err := hex.DecodeString(ivHex) + if err != nil { + return nil, err + } + + derivedKey := pbkdf2.Key([]byte(key), salt, 1000, 16, sha1.New) + + ciphertext, err := base64.StdEncoding.DecodeString(content) + if err != nil { + return nil, fmt.Errorf("base64 decode: %w", err) + } + + block, err := aes.NewCipher(derivedKey) + if err != nil { + return nil, err + } + + if len(ciphertext)%aes.BlockSize != 0 { + return nil, fmt.Errorf("ciphertext length %d not a multiple of block size", len(ciphertext)) + } + + cipher.NewCBCDecrypter(block, iv).CryptBlocks(ciphertext, ciphertext) + return pkcs7Unpad(ciphertext), nil +} + +func pkcs7Unpad(b []byte) []byte { + if len(b) == 0 { + return b + } + pad := int(b[len(b)-1]) + if pad == 0 || pad > aes.BlockSize || pad > len(b) { + return b + } + return b[:len(b)-pad] +} diff --git a/apps/api/providers/amtrak/types.go b/apps/api/providers/amtrak/types.go new file mode 100644 index 0000000..80006bd --- /dev/null +++ b/apps/api/providers/amtrak/types.go @@ -0,0 +1,125 @@ +package amtrak + +import ( + "encoding/json" + "sort" + "strconv" + "strings" +) + +// trainData is the top-level GeoJSON FeatureCollection returned after decryption. +type trainData struct { + Features []trainFeature `json:"features"` +} + +// trainFeature is a single train as a GeoJSON feature. +type trainFeature struct { + Properties trainProperties `json:"properties"` + Geometry trainGeometry `json:"geometry"` +} + +// trainGeometry is a GeoJSON Point: coordinates are [lon, lat]. +type trainGeometry struct { + Coordinates []float64 `json:"coordinates"` +} + +// trainProperties holds the known scalar fields from the Amtrak properties object, +// plus Stations which is populated by the custom unmarshaler from the StationN keys. 
+type trainProperties struct { + TrainNum string `json:"TrainNum"` + RouteName string `json:"RouteName"` + Velocity string `json:"Velocity"` + Heading string `json:"Heading"` + EventCode string `json:"EventCode"` + UpdatedAt string `json:"updated_at"` + + // Stations is derived from the StationN keys by UnmarshalJSON, sorted by sequence. + Stations []stationEntry +} + +// UnmarshalJSON handles the trainProperties object. Known scalar fields are decoded +// normally via a shadow struct; StationN keys (Station1, Station2, ...) are collected +// into Stations. Each non-null StationN value is a JSON string that itself encodes a +// JSON object, so two levels of unmarshaling are required. +func (p *trainProperties) UnmarshalJSON(data []byte) error { + // Decode known scalar fields using a shadow alias to avoid recursion. + type shadow struct { + TrainNum string `json:"TrainNum"` + RouteName string `json:"RouteName"` + Velocity string `json:"Velocity"` + Heading string `json:"Heading"` + EventCode string `json:"EventCode"` + UpdatedAt string `json:"updated_at"` + } + var s shadow + if err := json.Unmarshal(data, &s); err != nil { + return err + } + p.TrainNum = s.TrainNum + p.RouteName = s.RouteName + p.Velocity = s.Velocity + p.Heading = s.Heading + p.EventCode = s.EventCode + p.UpdatedAt = s.UpdatedAt + + // Scan for StationN keys. + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return err + } + + for key, val := range raw { + if !strings.HasPrefix(key, "Station") { + continue + } + seq, err := strconv.Atoi(key[len("Station"):]) + if err != nil || seq <= 0 { + continue + } + if string(val) == "null" { + continue + } + + // The value is a JSON string whose content is another JSON object. 
+ var jsonStr string + if err := json.Unmarshal(val, &jsonStr); err != nil { + continue + } + var entry stationEntry + if err := json.Unmarshal([]byte(jsonStr), &entry); err != nil { + continue + } + entry.Sequence = seq + p.Stations = append(p.Stations, entry) + } + + sort.Slice(p.Stations, func(i, j int) bool { + return p.Stations[i].Sequence < p.Stations[j].Sequence + }) + + return nil +} + +// stationEntry represents a single stop's status as embedded in a StationN value. +// +// Time semantics: +// - PostArr / PostDep present → train has already passed (actual times) +// - EstArr / EstDep present → upcoming stop with a live estimate +// - SchArr / SchDep always → the static scheduled time +// +// The Tz field is a single-letter abbreviation: P=Pacific, M=Mountain, C=Central, E=Eastern. +type stationEntry struct { + Sequence int // populated from the key name, not the JSON + + Code string `json:"code"` + Tz string `json:"tz"` + + SchArr string `json:"scharr"` + SchDep string `json:"schdep"` + + EstArr string `json:"estarr"` + EstDep string `json:"estdep"` + + PostArr string `json:"postarr"` + PostDep string `json:"postdep"` +} diff --git a/apps/api/providers/base/base.go b/apps/api/providers/base/base.go new file mode 100644 index 0000000..c44aaa1 --- /dev/null +++ b/apps/api/providers/base/base.go @@ -0,0 +1,78 @@ +package base + +import ( + "context" + "fmt" + + "github.com/Tracky-Trains/tracky/api/gtfs" + "github.com/Tracky-Trains/tracky/api/providers" +) + +// Config holds the configuration for a standard GTFS provider. +type Config struct { + ProviderID string + Name string + StaticURL string + PositionsURL string // GTFS-RT vehicle positions feed; empty = skip + TripUpdatesURL string // GTFS-RT trip updates feed; empty = skip + RealtimeAPIKey string // optional Bearer token for GTFS-RT requests +} + +// Provider is a standard GTFS/GTFS-RT provider implementation. 
+// It satisfies providers.Provider and can be used directly for well-behaved feeds, +// or embedded in a custom provider struct that overrides specific methods. +type Provider struct { + cfg Config +} + +// New creates a Provider from the given config. +func New(cfg Config) *Provider { + return &Provider{cfg: cfg} +} + +// ID returns the provider's canonical identifier. +func (p *Provider) ID() string { + return p.cfg.ProviderID +} + +// FetchStatic downloads and parses the GTFS static zip, returning a StaticFeed. +func (p *Provider) FetchStatic(ctx context.Context) (*providers.StaticFeed, error) { + agencies, routes, stops, trips, stopTimes, calendars, exceptions, err := + gtfs.FetchAndParseStatic(ctx, p.cfg.StaticURL, p.cfg.ProviderID) + if err != nil { + return nil, fmt.Errorf("%s: FetchStatic: %w", p.cfg.ProviderID, err) + } + return &providers.StaticFeed{ + Agencies: agencies, + Routes: routes, + Stops: stops, + Trips: trips, + StopTimes: stopTimes, + Calendars: calendars, + Exceptions: exceptions, + }, nil +} + +// FetchRealtime fetches vehicle positions and trip updates, returning a combined RealtimeFeed. +// Either URL may be empty, in which case that portion is skipped. 
+func (p *Provider) FetchRealtime(ctx context.Context) (*providers.RealtimeFeed, error) { + feed := &providers.RealtimeFeed{} + + if p.cfg.PositionsURL != "" { + positions, err := gtfs.FetchAndParsePositions(ctx, p.cfg.PositionsURL, p.cfg.ProviderID, p.cfg.RealtimeAPIKey) + if err != nil { + return nil, fmt.Errorf("%s: FetchRealtime positions: %w", p.cfg.ProviderID, err) + } + feed.Positions = positions + } + + if p.cfg.TripUpdatesURL != "" { + stopTimes, err := gtfs.FetchAndParseTripUpdates(ctx, p.cfg.TripUpdatesURL, p.cfg.ProviderID, p.cfg.RealtimeAPIKey) + if err != nil { + return nil, fmt.Errorf("%s: FetchRealtime trip updates: %w", p.cfg.ProviderID, err) + } + feed.StopTimes = stopTimes + } + + return feed, nil +} diff --git a/apps/api/providers/brightline/brightline.go b/apps/api/providers/brightline/brightline.go new file mode 100644 index 0000000..a3e4e97 --- /dev/null +++ b/apps/api/providers/brightline/brightline.go @@ -0,0 +1,23 @@ +package brightline + +import ( + "github.com/Tracky-Trains/tracky/api/providers/base" +) + +const ( + staticURL = "http://feed.gobrightline.com/bl_gtfs.zip" + positionsURL = "http://feed.gobrightline.com/position_updates.pb" + tripUpdatesURL = "http://feed.gobrightline.com/trip_updates.pb" +) + +// New returns a standard base provider configured for Brightline. +// Brightline correctly implements both GTFS and GTFS-RT, so no overrides are needed. 
+func New() *base.Provider {
+	return base.New(base.Config{
+		ProviderID:     "brightline",
+		Name:           "Brightline",
+		StaticURL:      staticURL,
+		PositionsURL:   positionsURL,
+		TripUpdatesURL: tripUpdatesURL,
+	})
+}
diff --git a/apps/api/providers/metra/metra.go b/apps/api/providers/metra/metra.go
new file mode 100644
index 0000000..ebd718a
--- /dev/null
+++ b/apps/api/providers/metra/metra.go
+package metra
+
+import (
+	"os"
+
+	"github.com/Tracky-Trains/tracky/api/providers/base"
+)
+
+const (
+	staticURL      = "https://schedules.metrarail.com/gtfs/schedule.zip"
+	positionsURL   = "https://gtfspublic.metrarr.com/gtfs/public/positions"
+	tripUpdatesURL = "https://gtfspublic.metrarr.com/gtfs/public/tripupdates"
+)
+
+// New returns a standard base provider configured for Metra.
+// Metra requires a Bearer token for GTFS-RT; set METRA_API_KEY in the environment.
+func New() *base.Provider {
+	return base.New(base.Config{
+		ProviderID:     "metra",
+		Name:           "Metra",
+		StaticURL:      staticURL,
+		PositionsURL:   positionsURL,
+		TripUpdatesURL: tripUpdatesURL,
+		RealtimeAPIKey: os.Getenv("METRA_API_KEY"),
+	})
+}
diff --git a/apps/api/providers/metrotransit/metrotransit.go b/apps/api/providers/metrotransit/metrotransit.go
new file mode 100644
index 0000000..76b8318
--- /dev/null
+++ b/apps/api/providers/metrotransit/metrotransit.go
+package metrotransit
+
+import (
+	"github.com/Tracky-Trains/tracky/api/providers/base"
+)
+
+// see https://svc.metrotransit.org/
+const (
+	staticURL      = "https://svc.metrotransit.org/mtgtfs/next/gtfs.zip"
+	positionsURL   = "https://svc.metrotransit.org/mtgtfs/vehiclepositions.pb"
+	tripUpdatesURL = "https://svc.metrotransit.org/mtgtfs/tripupdates.pb"
+)
+
+// New returns a standard base provider configured for Metro Transit.
+ +func New() *base.Provider { + return base.New(base.Config{ + ProviderID: "metrotransit", + Name: "Metro Transit", + StaticURL: staticURL, + PositionsURL: positionsURL, + TripUpdatesURL: tripUpdatesURL, + }) +} diff --git a/apps/api/providers/providers.go b/apps/api/providers/providers.go new file mode 100644 index 0000000..ce9cef2 --- /dev/null +++ b/apps/api/providers/providers.go @@ -0,0 +1,70 @@ +package providers + +import ( + "context" + "fmt" + "sort" + + "github.com/Tracky-Trains/tracky/api/spec" +) + +// Provider is the interface every transit data provider must implement. +type Provider interface { + ID() string + FetchStatic(ctx context.Context) (*StaticFeed, error) + FetchRealtime(ctx context.Context) (*RealtimeFeed, error) +} + +// StaticFeed holds all data parsed from a GTFS static zip. +type StaticFeed struct { + Agencies []spec.Agency `json:"agencies"` + Routes []spec.Route `json:"routes"` + Stops []spec.Stop `json:"stops"` + Trips []spec.Trip `json:"trips"` + StopTimes []spec.ScheduledStopTime `json:"stopTimes"` + Calendars []spec.ServiceCalendar `json:"calendars"` + Exceptions []spec.ServiceException `json:"exceptions"` +} + +// RealtimeFeed holds all data parsed from a GTFS-RT protobuf feed. +type RealtimeFeed struct { + Positions []spec.TrainPosition `json:"positions"` + StopTimes []spec.TrainStopTime `json:"stopTimes"` +} + +// Registry maps provider IDs to their implementations. +type Registry struct { + providers map[string]Provider +} + +// NewRegistry returns an empty Registry. +func NewRegistry() *Registry { + return &Registry{providers: make(map[string]Provider)} +} + +// Register adds a provider to the registry. Panics on duplicate ID. +func (r *Registry) Register(p Provider) { + id := p.ID() + if _, exists := r.providers[id]; exists { + panic(fmt.Sprintf("providers: duplicate provider ID %q", id)) + } + r.providers[id] = p +} + +// Get returns the provider with the given ID. 
+func (r *Registry) Get(id string) (Provider, bool) {
+	p, ok := r.providers[id]
+	return p, ok
+}
+
+// All returns all registered providers sorted by ID.
+func (r *Registry) All() []Provider {
+	out := make([]Provider, 0, len(r.providers))
+	for _, p := range r.providers {
+		out = append(out, p)
+	}
+	sort.Slice(out, func(i, j int) bool {
+		return out[i].ID() < out[j].ID()
+	})
+	return out
+}
diff --git a/apps/api/providers/trirail/trirail.go b/apps/api/providers/trirail/trirail.go
new file mode 100644
index 0000000..028e30a
--- /dev/null
+++ b/apps/api/providers/trirail/trirail.go
+package trirail
+
+import (
+	"github.com/Tracky-Trains/tracky/api/providers/base"
+)
+
+// see https://gtfsr.tri-rail.com/
+const (
+	staticURL      = "https://gtfs.tri-rail.com/gtfs.zip"
+	positionsURL   = "https://gtfsr.tri-rail.com/download.aspx?file=position_updates.pb"
+	tripUpdatesURL = "https://gtfsr.tri-rail.com/download.aspx?file=trip_updates.pb"
+)
+
+// New returns a standard base provider configured for Tri-Rail.
+//
+// Known issues with Tri-Rail's GTFS-RT feed (trip_updates):
+// - stop_time_update entries omit stop_id — stops are identified only by stop_sequence.
+//   Resolving stop codes requires a static GTFS lookup by (trip_id, stop_sequence).
+// - arrival/departure times are delay-only (e.g. {"delay": 0}) with no absolute time field.
+//   Estimated times must be derived by adding the delay to the static scheduled time.
+// - trip entries omit route_id and start_date, so RouteID and RunDate are always empty.
+//   Both fields must be resolved from static GTFS using trip_id.
+func New() *base.Provider { + return base.New(base.Config{ + ProviderID: "trirail", + Name: "Tri-Rail", + StaticURL: staticURL, + PositionsURL: positionsURL, + TripUpdatesURL: tripUpdatesURL, + }) +} diff --git a/apps/api/routes/routes.go b/apps/api/routes/routes.go new file mode 100644 index 0000000..8e26721 --- /dev/null +++ b/apps/api/routes/routes.go @@ -0,0 +1,105 @@ +package routes + +import ( + "encoding/json" + "net/http" + "time" + + "github.com/Tracky-Trains/tracky/api/providers" +) + +// Setup registers all routes onto mux. +func Setup(mux *http.ServeMux, registry *providers.Registry) { + mux.HandleFunc("GET /debug/providers", handleListProviders(registry)) + mux.HandleFunc("GET /debug/providers/{id}/static", handleSyncStatic(registry)) + mux.HandleFunc("GET /debug/providers/{id}/realtime", handleSyncRealtime(registry)) +} + +func writeJSON(w http.ResponseWriter, status int, v any) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + json.NewEncoder(w).Encode(v) +} + +// handleListProviders returns all registered providers. +func handleListProviders(registry *providers.Registry) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + type entry struct { + ID string `json:"id"` + StaticEndpoint string `json:"static_endpoint"` + RealtimeEndpoint string `json:"realtime_endpoint"` + } + all := registry.All() + out := make([]entry, len(all)) + for i, p := range all { + out[i] = entry{ + ID: p.ID(), + StaticEndpoint: "/debug/providers/" + p.ID() + "/static", + RealtimeEndpoint: "/debug/providers/" + p.ID() + "/realtime", + } + } + writeJSON(w, http.StatusOK, out) + } +} + +// handleSyncStatic triggers a GTFS static fetch for the given provider. 
+func handleSyncStatic(registry *providers.Registry) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + id := r.PathValue("id") + p, ok := registry.Get(id) + if !ok { + writeJSON(w, http.StatusNotFound, map[string]string{"error": "provider not found"}) + return + } + + start := time.Now() + feed, err := p.FetchStatic(r.Context()) + elapsed := time.Since(start).Milliseconds() + + if err != nil { + writeJSON(w, http.StatusBadGateway, map[string]any{ + "provider": id, + "error": err.Error(), + "elapsed_ms": elapsed, + }) + return + } + + writeJSON(w, http.StatusOK, map[string]any{ + "provider": id, + "elapsed_ms": elapsed, + "data": feed, + }) + } +} + +// handleSyncRealtime triggers a GTFS-RT fetch for the given provider. +func handleSyncRealtime(registry *providers.Registry) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + id := r.PathValue("id") + p, ok := registry.Get(id) + if !ok { + writeJSON(w, http.StatusNotFound, map[string]string{"error": "provider not found"}) + return + } + + start := time.Now() + feed, err := p.FetchRealtime(r.Context()) + elapsed := time.Since(start).Milliseconds() + + if err != nil { + writeJSON(w, http.StatusBadGateway, map[string]any{ + "provider": id, + "error": err.Error(), + "elapsed_ms": elapsed, + }) + return + } + + writeJSON(w, http.StatusOK, map[string]any{ + "provider": id, + "elapsed_ms": elapsed, + "data": feed, + }) + } +} diff --git a/apps/api/spec/realtime.go b/apps/api/spec/realtime.go new file mode 100644 index 0000000..39cedee --- /dev/null +++ b/apps/api/spec/realtime.go @@ -0,0 +1,61 @@ +package spec + +import "time" + +// TrainPosition represents the current live position of an active train run. +// One row per (provider, trip_id, run_date). Upserted on every poll. 
+type TrainPosition struct { + Provider string `db:"provider" json:"provider"` + TripID string `db:"trip_id" json:"tripId"` + RunDate time.Time `db:"run_date" json:"runDate"` + TrainNumber string `db:"train_number" json:"trainNumber"` + RouteID string `db:"route_id" json:"routeId"` + VehicleID string `db:"vehicle_id" json:"vehicleId"` + Lat *float64 `db:"lat" json:"lat"` + Lon *float64 `db:"lon" json:"lon"` + Heading *string `db:"heading" json:"heading"` + SpeedMPH *float64 `db:"speed_mph" json:"speedMph"` + CurrentStopCode *string `db:"current_stop_code" json:"currentStopCode"` + LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` +} + +// TrainStopTime represents a single stop within a single run of a trip. +// Serves dual purpose: live state (estimated times) and historical record (actual times). +// One row per (provider, trip_id, run_date, stop_code). +type TrainStopTime struct { + Provider string `db:"provider" json:"provider"` + TripID string `db:"trip_id" json:"tripId"` + RunDate time.Time `db:"run_date" json:"runDate"` + StopCode string `db:"stop_code" json:"stopCode"` + StopSequence int `db:"stop_sequence" json:"stopSequence"` + + // From static GTFS — written once, never updated + ScheduledArr *time.Time `db:"scheduled_arr" json:"scheduledArr"` + ScheduledDep *time.Time `db:"scheduled_dep" json:"scheduledDep"` + + // Live estimates — updated each poll until actual is known + EstimatedArr *time.Time `db:"estimated_arr" json:"estimatedArr"` + EstimatedDep *time.Time `db:"estimated_dep" json:"estimatedDep"` + + // Actuals — written once when train passes stop, permanent + ActualArr *time.Time `db:"actual_arr" json:"actualArr"` + ActualDep *time.Time `db:"actual_dep" json:"actualDep"` + + LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` +} + +// IsPassed returns true if the train has already passed this stop. 
+func (s *TrainStopTime) IsPassed() bool { + return s.ActualArr != nil +} + +// IsLive returns true if this stop still has a pending estimate. +func (s *TrainStopTime) IsLive() bool { + return s.ActualArr == nil && s.EstimatedArr != nil +} + +// RunID returns a canonical string identifier for this specific train run. +// Useful for logging, caching keys, and display. +func (s *TrainStopTime) RunID() string { + return s.Provider + ":" + s.TripID + ":" + s.RunDate.Format("2006-01-02") +} diff --git a/apps/api/spec/schedule.go b/apps/api/spec/schedule.go new file mode 100644 index 0000000..b256885 --- /dev/null +++ b/apps/api/spec/schedule.go @@ -0,0 +1,95 @@ +package spec + +import "time" + +// Agency represents a transit operator. +// Maps to GTFS agency.txt. Root of the namespace for all entities. +// Replaces the custom Provider concept — agency_id is the canonical identifier. +type Agency struct { + AgencyID string `db:"agency_id" json:"agencyId"` // namespaced: 'amtrak', 'via', 'brightline' + Name string `db:"name" json:"name"` // 'National Railroad Passenger Corporation' + URL string `db:"url" json:"url"` // 'https://www.amtrak.com' + Timezone string `db:"timezone" json:"timezone"` // 'America/New_York' + Lang *string `db:"lang" json:"lang"` // 'en' + Phone *string `db:"phone" json:"phone"` + Country string `db:"country" json:"country"` // 'US', 'CA' — extension, not in GTFS spec +} + +// Route represents a named service operated by an agency. +// Maps to GTFS routes.txt. +type Route struct { + AgencyID string `db:"agency_id" json:"agencyId"` // 'amtrak' + RouteID string `db:"route_id" json:"routeId"` // namespaced: 'amtrak:coast-starlight' + ShortName string `db:"short_name" json:"shortName"` // '14' + LongName string `db:"long_name" json:"longName"` // 'Coast Starlight' + Color string `db:"color" json:"color"` // hex without #, e.g. '1D2E6E' + TextColor string `db:"text_color" json:"textColor"` // hex without #, e.g. 
'FFFFFF' + ShapeID *string `db:"shape_id" json:"shapeId"` // reference into tile layer, not a DB table +} + +// Stop represents a physical station or stop. +// Maps to GTFS stops.txt. +type Stop struct { + AgencyID string `db:"agency_id" json:"agencyId"` // 'amtrak' + StopID string `db:"stop_id" json:"stopId"` // namespaced: 'amtrak:LAX' + Code string `db:"code" json:"code"` // native code: 'LAX' + Name string `db:"name" json:"name"` // 'Los Angeles' + Lat float64 `db:"lat" json:"lat"` + Lon float64 `db:"lon" json:"lon"` + Timezone *string `db:"timezone" json:"timezone"` // stop-local tz if different from agency + WheelchairBoarding *bool `db:"wheelchair_boarding" json:"wheelchairBoarding"` +} + +// Trip represents a scheduled service pattern. +// Maps to GTFS trips.txt — one row per trip_id in the feed. +// Note: a Trip is the template; a run is Trip + RunDate. +type Trip struct { + AgencyID string `db:"agency_id" json:"agencyId"` // 'amtrak' + TripID string `db:"trip_id" json:"tripId"` // namespaced: 'amtrak:5' + RouteID string `db:"route_id" json:"routeId"` // 'amtrak:coast-starlight' + ServiceID string `db:"service_id" json:"serviceId"` // links to ServiceCalendar + Headsign string `db:"headsign" json:"headsign"` // 'Chicago' + ShapeID *string `db:"shape_id" json:"shapeId"` // for geometry lookup, matches Route.ShapeID + DirectionID *int `db:"direction_id" json:"directionId"` // 0=outbound, 1=inbound +} + +// ScheduledStopTime represents a trip's scheduled arrival/departure at a stop. +// Maps to GTFS stop_times.txt. Static timetable only — never updated. +// Actual and estimated times live in TrainStopTime (realtime model). 
+type ScheduledStopTime struct { + AgencyID string `db:"agency_id" json:"agencyId"` + TripID string `db:"trip_id" json:"tripId"` + StopID string `db:"stop_id" json:"stopId"` + StopSequence int `db:"stop_sequence" json:"stopSequence"` + ArrivalTime *string `db:"arrival_time" json:"arrivalTime"` // string: GTFS allows >24:00:00 + DepartureTime *string `db:"departure_time" json:"departureTime"` // string: same reason + Timepoint *bool `db:"timepoint" json:"timepoint"` // true=exact, false=approximate + DropOffType *int `db:"drop_off_type" json:"dropOffType"` // 0=regular, 1=none, 2=phone, 3=arrange + PickupType *int `db:"pickup_type" json:"pickupType"` // same codes +} + +// ServiceCalendar represents which days of the week a service_id runs. +// Maps to GTFS calendar.txt. +type ServiceCalendar struct { + AgencyID string `db:"agency_id" json:"agencyId"` + ServiceID string `db:"service_id" json:"serviceId"` + Monday bool `db:"monday" json:"monday"` + Tuesday bool `db:"tuesday" json:"tuesday"` + Wednesday bool `db:"wednesday" json:"wednesday"` + Thursday bool `db:"thursday" json:"thursday"` + Friday bool `db:"friday" json:"friday"` + Saturday bool `db:"saturday" json:"saturday"` + Sunday bool `db:"sunday" json:"sunday"` + StartDate time.Time `db:"start_date" json:"startDate"` + EndDate time.Time `db:"end_date" json:"endDate"` +} + +// ServiceException represents a one-off addition or removal of service. +// Maps to GTFS calendar_dates.txt. +// Note: calendar.txt is optional if calendar_dates.txt covers all service dates. +type ServiceException struct { + AgencyID string `db:"agency_id" json:"agencyId"` + ServiceID string `db:"service_id" json:"serviceId"` + Date time.Time `db:"date" json:"date"` + ExceptionType int `db:"exception_type" json:"exceptionType"` // 1=service added, 2=service removed +}