From f374b04f9ccae28b06f12a56f22340e85d923ff7 Mon Sep 17 00:00:00 2001 From: Francia Csaba Date: Mon, 11 Dec 2023 11:00:32 +0100 Subject: [PATCH] Added services and models --- amarillo/app/models/AgencyConf.py | 26 ++ amarillo/app/models/Carpool.py | 284 +++++++++++++ amarillo/app/models/__init__.py | 0 amarillo/app/models/gtfs.py | 29 ++ amarillo/app/services/agencies.py | 25 ++ amarillo/app/services/agencyconf.py | 111 ++++++ amarillo/app/services/carpools.py | 61 +++ amarillo/app/services/config.py | 11 + amarillo/app/services/gtfs.py | 137 +++++++ amarillo/app/services/gtfs_constants.py | 14 + amarillo/app/services/gtfsrt/__init__.py | 0 .../app/services/gtfsrt/gtfs_realtime_pb2.py | 80 ++++ .../services/gtfsrt/realtime_extension_pb2.py | 33 ++ amarillo/app/services/regions.py | 22 ++ amarillo/app/services/routing.py | 47 +++ amarillo/app/services/stops.py | 182 +++++++++ amarillo/app/services/trips.py | 374 ++++++++++++++++++ 17 files changed, 1436 insertions(+) create mode 100644 amarillo/app/models/AgencyConf.py create mode 100644 amarillo/app/models/Carpool.py create mode 100644 amarillo/app/models/__init__.py create mode 100644 amarillo/app/models/gtfs.py create mode 100644 amarillo/app/services/agencies.py create mode 100644 amarillo/app/services/agencyconf.py create mode 100644 amarillo/app/services/carpools.py create mode 100644 amarillo/app/services/config.py create mode 100644 amarillo/app/services/gtfs.py create mode 100644 amarillo/app/services/gtfs_constants.py create mode 100644 amarillo/app/services/gtfsrt/__init__.py create mode 100644 amarillo/app/services/gtfsrt/gtfs_realtime_pb2.py create mode 100644 amarillo/app/services/gtfsrt/realtime_extension_pb2.py create mode 100644 amarillo/app/services/regions.py create mode 100644 amarillo/app/services/routing.py create mode 100644 amarillo/app/services/stops.py create mode 100644 amarillo/app/services/trips.py diff --git a/amarillo/app/models/AgencyConf.py b/amarillo/app/models/AgencyConf.py new 
file mode 100644 index 0000000..29ac2f5 --- /dev/null +++ b/amarillo/app/models/AgencyConf.py @@ -0,0 +1,26 @@ +from pydantic import ConfigDict, BaseModel, Field + + +class AgencyConf(BaseModel): + agency_id: str = Field( + description="ID of the agency that uses this token.", + min_length=1, + max_length=20, + pattern='^[a-zA-Z0-9]+$', + examples=["mfdz"]) + + api_key: str = Field( + description="The agency's API key for using the API", + min_length=20, + max_length=256, + pattern=r'^[a-zA-Z0-9]+$', + examples=["d8yLuY4DqMEUCLcfJASi"]) + model_config = ConfigDict(json_schema_extra={ + "title": "Agency Configuration", + "description": "Configuration for an agency.", + "example": + { + "agency_id": "mfdz", + "api_key": "d8yLuY4DqMEUCLcfJASi" + } + }) diff --git a/amarillo/app/models/Carpool.py b/amarillo/app/models/Carpool.py new file mode 100644 index 0000000..1371c27 --- /dev/null +++ b/amarillo/app/models/Carpool.py @@ -0,0 +1,284 @@ +from datetime import time, date, datetime +from pydantic import ConfigDict, BaseModel, Field, HttpUrl, EmailStr +from typing import List, Union, Set, Optional, Tuple +from datetime import time +from pydantic import BaseModel, Field +from geojson_pydantic.geometries import LineString +from enum import Enum + +NumType = Union[float, int] + +MAX_STOPS_PER_TRIP = 100 + +class Weekday(str, Enum): + monday = "monday" + tuesday = "tuesday" + wednesday = "wednesday" + thursday = "thursday" + friday = "friday" + saturday = "saturday" + sunday = "sunday" + +class PickupDropoffType(str, Enum): + pickup_and_dropoff = "pickup_and_dropoff" + only_pickup = "only_pickup" + only_dropoff = "only_dropoff" + +class StopTime(BaseModel): + id: Optional[str] = Field( + None, + description="Optional Stop ID. If given, it should conform to the " + "IFOPT specification. For official transit stops, " + "it should be their official IFOPT. 
In Germany, this is " + "the DHID which is available via the 'zentrales " + "Haltestellenverzeichnis (zHV)', published by DELFI e.V. " + "Note, that currently carpooling location.", + pattern=r"^([a-zA-Z]{2,6}):\d+:\d+(:\d*(:\w+)?)?$|^osm:[nwr]\d+$", + examples=["de:12073:900340137::2"]) + + name: str = Field( + description="Name of the location. Use a name that people will " + "understand in the local and tourist vernacular.", + min_length=1, + max_length=256, + examples=["Angermünde, Breitscheidstr."]) + + departureTime: Optional[str] = Field( + None, + description="Departure time from a specific stop for a specific " + "carpool trip. For times occurring after midnight on the " + "service day, the time is given as a value greater than " + "24:00:00 in HH:MM:SS local time for the day on which the " + "trip schedule begins. If there are not separate times for " + "arrival and departure at a stop, the same value for arrivalTime " + "and departureTime. Note, that arrivalTime/departureTime of " + "the stops are not mandatory, and might then be estimated by " + "this service.", + pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$", + examples=["17:00"] + ) + + arrivalTime: Optional[str] = Field( + None, + description="Arrival time at a specific stop for a specific trip on a " + "carpool route. If there are not separate times for arrival " + "and departure at a stop, enter the same value for arrivalTime " + "and departureTime. For times occurring after midnight on the " + "service day, the time as a value greater than 24:00:00 in " + "HH:MM:SS local time for the day on which the trip schedule " + "begins. Note, that arrivalTime/departureTime of the stops " + "are not mandatory, and might then be estimated by this " + "service.", + pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$", + examples=["18:00"]) + + lat: float = Field( + description="Latitude of the location. 
Should describe the location " + "where a passenger may mount/dismount the vehicle.", + ge=-90, + lt=90, + examples=["53.0137311391"]) + + lon: float = Field( + description="Longitude of the location. Should describe the location " + "where a passenger may mount/dismount the vehicle.", + ge=-180, + lt=180, + examples=["13.9934706687"]) + + pickup_dropoff: Optional[PickupDropoffType] = Field( + None, description="If passengers may be picked up, dropped off or both at this stop. " + "If not specified, this service may assign this according to some custom rules. " + "E.g. Amarillo may allow pickup only for the first third of the distance travelled, " + "and dropoff only for the last third." , + examples=["only_pickup"] + ) + model_config = ConfigDict(json_schema_extra={ + "example": "{'id': 'de:12073:900340137::2', 'name': " + "'Angermünde, Breitscheidstr.', 'lat': 53.0137311391, " + "'lon': 13.9934706687}" + }) + +class Region(BaseModel): + id: str = Field( + description="ID of the region.", + min_length=1, + max_length=20, + pattern='^[a-zA-Z0-9]+$', + examples=["bb"]) + + bbox: Tuple[NumType, NumType, NumType, NumType] = Field( + description="Bounding box of the region. 
Format is [minLon, minLat, maxLon, maxLat]", + examples=[[10.5,49.2,11.3,51.3]]) + +class Agency(BaseModel): + id: str = Field( + description="ID of the agency.", + min_length=1, + max_length=20, + pattern='^[a-zA-Z0-9]+$', + examples=["mfdz"]) + + name: str = Field( + description="Name", + min_length=1, + max_length=48, + pattern=r'^[\w -\.\|]+$', + examples=["MITFAHR|DE|ZENTRALE"]) + + url: HttpUrl = Field( + description="URL of the carpool agency.", + examples=["https://mfdz.de/"]) + + timezone: str = Field( + description="Timezone where the carpool agency is located.", + min_length=1, + max_length=48, + pattern=r'^[\w/]+$', + examples=["Europe/Berlin"]) + + lang: str = Field( + description="Primary language used by this carpool agency.", + min_length=1, + max_length=2, + pattern=r'^[a-zA-Z_]+$', + examples=["de"]) + + email: EmailStr = Field( + description="""Email address actively monitored by the agency’s + customer service department. This email address should be a direct + contact point where carpool riders can reach a customer service + representative at the agency.""", + examples=["info@mfdz.de"]) + + terms_url: Optional[HttpUrl] = Field( + None, description="""A fully qualified URL pointing to the terms of service + (also often called "terms of use" or "terms and conditions") + for the service.""", + examples=["https://mfdz.de/nutzungsbedingungen"]) + + privacy_url: Optional[HttpUrl] = Field( + None, description="""A fully qualified URL pointing to the privacy policy for the service.""", + examples=["https://mfdz.de/datenschutz"]) + model_config = ConfigDict(json_schema_extra={ + "title": "Agency", + "description": "Carpool agency.", + "example": + #""" + { + "id": "mfdz", + "name": "MITFAHR|DE|ZENTRALE", + "url": "http://mfdz.de", + "timezone": "Europe/Berlin", + "lang": "de", + "email": "info@mfdz.de", + "terms_url": "https://mfdz.de/nutzungsbedingungen", + "privacy_url": "https://mfdz.de/datenschutz", + } + #""" + }) + +class Carpool(BaseModel): + id: 
str = Field( + description="ID of the carpool. Should be supplied and managed by the " + "carpooling platform which originally published this " + "offer.", + min_length=1, + max_length=256, + pattern='^[a-zA-Z0-9_-]+$', + examples=["103361"]) + + agency: str = Field( + description="Short one string name of the agency, used as a namespace " + "for ids.", + min_length=1, + max_length=20, + pattern='^[a-zA-Z0-9]+$', + examples=["mfdz"]) + + deeplink: HttpUrl = Field( + description="Link to an information page providing detail information " + "for this offer, and, especially, an option to book the " + "trip/contact the driver.", + examples=["https://mfdz.de/trip/103361"]) + + stops: List[StopTime] = Field( + ..., + min_length=2, + max_length=MAX_STOPS_PER_TRIP, + description="Stops which this carpool passes by and offers to pick " + "up/drop off passengers. This list must at minimum " + "include two stops, the origin and destination of this " + "carpool trip. Note that for privacy reasons, the stops " + "usually should be official locations, like meeting " + "points, carpool parkings, ridesharing benches or " + "similar.", + examples=["""[ + { + "id": "03", + "name": "drei", + "lat": 45, + "lon": 9 + }, + { + "id": "03b", + "name": "drei b", + "lat": 45, + "lon": 9 + } + ]"""]) + + # TODO can be removed, as first stop has departureTime as well + departureTime: time = Field( + description="Time when the carpool leaves at the first stop. Note, " + "that this API currently does not support flexible time " + "windows for departure, though drivers might be flexible." + "For recurring trips, the weekdays this trip will run. ", + examples=["17:00"]) + + # TODO think about using googlecal Format + departureDate: Union[date, Set[Weekday]] = Field( + description="Date when the trip will start, in case it is a one-time " + "trip. For recurring trips, specify weekdays. 
" + "Note, that when for different weekdays different " + "departureTimes apply, multiple carpool offers should be " + "published.", + examples=['A single date 2022-04-04 or a list of weekdays ["saturday", ' + '"sunday"]']) + + path: Optional[LineString] = Field( + None, description="Optional route geometry as json LineString.") + + lastUpdated: Optional[datetime] = Field( + None, + description="LastUpdated should reflect the last time, the user " + "providing this offer, made an update or confirmed, " + "the offer is still valid. Note that this service might " + "purge outdated offers (e.g. older than 180 days). If not " + "passed, the service may assume 'now'", + examples=["2022-02-13T20:20:39+00:00"]) + model_config = ConfigDict(json_schema_extra={ + "title": "Carpool", + # description ... + "example": + """ + { + "id": "1234", + "agency": "mfdz", + "deeplink": "http://mfdz.de", + "stops": [ + { + "id": "de:12073:900340137::2", "name": "ABC", + "lat": 45, "lon": 9 + }, + { + "id": "de:12073:900340137::3", "name": "XYZ", + "lat": 45, "lon": 9 + } + ], + "departureTime": "12:34", + "departureDate": "2022-03-30", + "lastUpdated": "2022-03-30T12:34:00+00:00" + } + """ + }) diff --git a/amarillo/app/models/__init__.py b/amarillo/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/amarillo/app/models/gtfs.py b/amarillo/app/models/gtfs.py new file mode 100644 index 0000000..ee7f701 --- /dev/null +++ b/amarillo/app/models/gtfs.py @@ -0,0 +1,29 @@ +from collections import namedtuple +from datetime import timedelta + +GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version') +GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email') +GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name') +GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name') +GtfsStopTime = 
namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint') +GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed') +GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday') +GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type') +GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence') + +# TODO Move to utils +class GtfsTimeDelta(timedelta): + def __str__(self): + seconds = self.total_seconds() + hours = seconds // 3600 + minutes = (seconds % 3600) // 60 + seconds = seconds % 60 + str = '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds)) + return (str) + + def __add__(self, other): + if isinstance(other, timedelta): + return self.__class__(self.days + other.days, + self.seconds + other.seconds, + self.microseconds + other.microseconds) + return NotImplemented \ No newline at end of file diff --git a/amarillo/app/services/agencies.py b/amarillo/app/services/agencies.py new file mode 100644 index 0000000..0f3afae --- /dev/null +++ b/amarillo/app/services/agencies.py @@ -0,0 +1,25 @@ +import json +from glob import glob +from typing import Dict + +from app.models.Carpool import Agency + +# TODO FG HB this service should also listen to pyinotify +# because the (updated) agencies are needed in the enhancer +# as well. 
+ +class AgencyService: + + def __init__(self): + self.agencies: Dict[str, Agency] = {} + + for agency_file_name in glob('conf/agency/*.json'): + with open(agency_file_name) as agency_file: + dict = json.load(agency_file) + agency = Agency(**dict) + agency_id = agency.id + self.agencies[agency_id] = agency + + def get_agency(self, agency_id: str) -> Agency: + agency = self.agencies.get(agency_id) + return agency diff --git a/amarillo/app/services/agencyconf.py b/amarillo/app/services/agencyconf.py new file mode 100644 index 0000000..7d90a40 --- /dev/null +++ b/amarillo/app/services/agencyconf.py @@ -0,0 +1,111 @@ +import json +import os +from glob import glob +from typing import Dict, List +import logging + +from fastapi import HTTPException, status + +from app.models.AgencyConf import AgencyConf +from app.services.config import config + +logger = logging.getLogger(__name__) + +agency_conf_directory = 'data/agencyconf' + + +class AgencyConfService: + + def __init__(self): + # Both Dicts to be kept in sync always. The second api_key_to_agency_id is like a reverse + # cache for the first for fast lookup of valid api keys, which happens on *every* request. + self.agency_id_to_agency_conf: Dict[str, AgencyConf] = {} + self.api_key_to_agency_id: Dict[str, str] = {} + + for agency_conf_file_name in glob(f'{agency_conf_directory}/*.json'): + with open(agency_conf_file_name) as agency_conf_file: + dictionary = json.load(agency_conf_file) + + agency_conf = AgencyConf(**dictionary) + + agency_id = agency_conf.agency_id + api_key = agency_conf.api_key + + self.agency_id_to_agency_conf[agency_id] = agency_conf + self.api_key_to_agency_id[api_key] = agency_conf.agency_id + + def get_agency_conf(self, agency_id: str) -> AgencyConf: + agency_conf = self.agency_id_to_agency_conf.get(agency_id) + return agency_conf + + def check_api_key(self, api_key: str) -> str: + """Check if the API key is valid + + The agencies' api keys are checked first, and the admin's key. 
+ + The agency_id or "admin" is returned for further checks in the caller if the + request is permitted, like {agency_id} == agency_id. + """ + + agency_id = self.api_key_to_agency_id.get(api_key) + + is_agency = agency_id is not None + + if is_agency: + return agency_id + + is_admin = api_key == config.admin_token + + if is_admin: + return "admin" + + message = "X-API-Key header invalid" + logger.error(message) + raise HTTPException(status_code=400, detail=message) + + def add(self, agency_conf: AgencyConf): + + agency_id = agency_conf.agency_id + api_key = agency_conf.api_key + + agency_id_exists_already = self.agency_id_to_agency_conf.get(agency_id) is not None + + if agency_id_exists_already: + message = f"Agency {agency_id} exists already. To update, delete it first." + logger.error(message) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) + + agency_using_this_api_key_already = self.api_key_to_agency_id.get(api_key) + a_different_agency_is_using_this_api_key_already = \ + agency_using_this_api_key_already is not None and \ + agency_using_this_api_key_already != agency_id + + if a_different_agency_is_using_this_api_key_already: + message = f"Duplicate API Key for {agency_id} not permitted. Use a different key." 
+ logger.error(message) + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) + + with open(f'{agency_conf_directory}/{agency_id}.json', 'w', encoding='utf-8') as f: + f.write(agency_conf.json()) + + self.agency_id_to_agency_conf[agency_id] = agency_conf + self.api_key_to_agency_id[api_key] = agency_id + + logger.info(f"Added configuration for agency {agency_id}.") + + def get_agency_ids(self) -> List[str]: + return list(self.agency_id_to_agency_conf.keys()) + + def delete(self, agency_id): + + agency_conf = self.agency_id_to_agency_conf.get(agency_id) + + api_key = agency_conf.api_key + + del self.api_key_to_agency_id[api_key] + + del self.agency_id_to_agency_conf[agency_id] + + os.remove(f'{agency_conf_directory}/{agency_id}.json') + + logger.info(f"Deleted configuration for agency {agency_id}.") diff --git a/amarillo/app/services/carpools.py b/amarillo/app/services/carpools.py new file mode 100644 index 0000000..2f173eb --- /dev/null +++ b/amarillo/app/services/carpools.py @@ -0,0 +1,61 @@ +import json +import logging +from datetime import datetime +from typing import Dict +from app.models.Carpool import Carpool +from app.services.trips import TripStore +from app.utils.utils import yesterday, is_older_than_days + +logger = logging.getLogger(__name__) + +class CarpoolService(): + MAX_OFFER_AGE_IN_DAYS = 180 + + def __init__(self, trip_store): + + self.trip_store = trip_store + self.carpools: Dict[str, Carpool] = {} + + def is_outdated(self, carpool): + """ + A carpool offer is outdated, if + * it's completly in the past (if it's a single date offer). 
+ As we know the start time but not latest arrival, we deem + offers starting the day before yesterday as outdated + * it's last update occured before MAX_OFFER_AGE_IN_DAYS + """ + runs_once = not isinstance(carpool.departureDate, set) + return (is_older_than_days(carpool.lastUpdated.date(), self.MAX_OFFER_AGE_IN_DAYS) or + (runs_once and carpool.departureDate < yesterday())) + + def purge_outdated_offers(self): + """ + Iterates over all carpools and deletes those which are outdated + """ + for key in list(self.carpools.keys()): + cp = self.carpools.get(key) + if cp and self.is_outdated(cp): + logger.info("Purge outdated offer %s", key) + self.delete(cp.agency, cp.id) + + def get(self, agency_id: str, carpool_id: str): + return self.carpools.get(f"{agency_id}:{carpool_id}") + + def get_all_ids(self): + return list(self.carpools) + + def put(self, agency_id: str, carpool_id: str, carpool): + self.carpools[f"{agency_id}:{carpool_id}"] = carpool + # Outdated trips (which might have been in the store) + # will be deleted + if self.is_outdated(carpool): + logger.info('Deleting outdated carpool %s:%s', agency_id, carpool_id) + self.delete(agency_id, carpool_id) + else: + self.trip_store.put_carpool(carpool) + + def delete(self, agency_id: str, carpool_id: str): + id = f"{agency_id}:{carpool_id}" + if id in self.carpools: + del self.carpools[id] + self.trip_store.delete_carpool(agency_id, carpool_id) diff --git a/amarillo/app/services/config.py b/amarillo/app/services/config.py new file mode 100644 index 0000000..7266100 --- /dev/null +++ b/amarillo/app/services/config.py @@ -0,0 +1,11 @@ +from typing import List +from pydantic_settings import BaseSettings + + +class Config(BaseSettings): + admin_token: str + ride2go_query_data: str + env: str = 'DEV' + + +config = Config(_env_file='config', _env_file_encoding='utf-8') diff --git a/amarillo/app/services/gtfs.py b/amarillo/app/services/gtfs.py new file mode 100644 index 0000000..b3f0d70 --- /dev/null +++ 
b/amarillo/app/services/gtfs.py @@ -0,0 +1,137 @@ +import app.services.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2 +import app.services.gtfsrt.realtime_extension_pb2 as mfdzrte +from app.services.gtfs_constants import * +from google.protobuf.json_format import MessageToDict +from google.protobuf.json_format import ParseDict +from datetime import datetime, timedelta +import json +import re +import time + +class GtfsRtProducer(): + + def __init__(self, trip_store): + self.trip_store = trip_store + + def generate_feed(self, time, format='protobuf', bbox=None): + # See https://developers.google.com/transit/gtfs-realtime/reference + # https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java + gtfsrt_dict = { + 'header': { + 'gtfsRealtimeVersion': '1.0', + 'timestamp': int(time) + }, + 'entity': self._get_trip_updates(bbox) + } + feed = gtfs_realtime_pb2.FeedMessage() + ParseDict(gtfsrt_dict, feed) + + if "message" == format.lower(): + return feed + elif "json" == format.lower(): + return MessageToDict(feed) + else: + return feed.SerializeToString() + + def export_feed(self, timestamp, file_path, bbox=None): + """ + Exports gtfs-rt feed as .json and .pbf file to file_path + """ + feed = self.generate_feed(timestamp, "message", bbox) + with open(f"{file_path}.pbf", "wb") as f: + f.write(feed.SerializeToString()) + with open(f"{file_path}.json", "w") as f: + json.dump(MessageToDict(feed), f) + + def _get_trip_updates(self, bbox = None): + trips = [] + trips.extend(self._get_added(bbox)) + trips.extend(self._get_deleted(bbox)) + trip_updates = [] + for num, trip in enumerate(trips): + trip_updates.append( { + 'id': f'carpool-update-{num}', + 'tripUpdate': trip + } + ) + return trip_updates + + def _get_deleted(self, bbox = None): + return self._get_updates( + self.trip_store.recently_deleted_trips(), + self._as_delete_updates, + bbox) + + def _get_added(self, bbox = None): + return self._get_updates( + 
self.trip_store.recently_added_trips(), + self._as_added_updates, + bbox) + + def _get_updates(self, trips, update_func, bbox = None): + updates = [] + today = datetime.today() + for t in trips: + if bbox == None or t.intersects(bbox): + updates.extend(update_func(t, today)) + return updates + + def _as_delete_updates(self, trip, fromdate): + return [{ + 'trip': { + 'tripId': trip.trip_id, + 'startTime': trip.start_time_str(), + 'startDate': trip_date, + 'scheduleRelationship': 'CANCELED', + 'routeId': trip.trip_id + } + } for trip_date in trip.next_trip_dates(fromdate)] + + def _to_seconds(self, fromdate, stop_time): + startdate = datetime.strptime(fromdate, '%Y%m%d') + m = re.search(r'(\d+):(\d+):(\d+)', stop_time) + delta = timedelta( + hours=int(m.group(1)), + minutes=int(m.group(2)), + seconds=int(m.group(3))) + return time.mktime((startdate + delta).timetuple()) + + def _to_stop_times(self, trip, fromdate): + return [{ + 'stopSequence': stoptime.stop_sequence, + 'arrival': { + 'time': self._to_seconds(fromdate, stoptime.arrival_time), + 'uncertainty': MFDZ_DEFAULT_UNCERTAINITY + }, + 'departure': { + 'time': self._to_seconds(fromdate, stoptime.departure_time), + 'uncertainty': MFDZ_DEFAULT_UNCERTAINITY + }, + 'stopId': stoptime.stop_id, + 'scheduleRelationship': 'SCHEDULED', + 'stop_time_properties': { + '[transit_realtime.stop_time_properties]': { + 'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE', + 'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE' + } + } + } + for stoptime in trip.stop_times] + + def _as_added_updates(self, trip, fromdate): + return [{ + 'trip': { + 'tripId': trip.trip_id, + 'startTime': trip.start_time_str(), + 'startDate': trip_date, + 'scheduleRelationship': 'ADDED', + 'routeId': trip.trip_id, + '[transit_realtime.trip_descriptor]': { + 'routeUrl' : trip.url, + 'agencyId' : trip.agency, + 
'route_long_name' : trip.route_long_name(), + 'route_type': RIDESHARING_ROUTE_TYPE + } + }, + 'stopTimeUpdate': self._to_stop_times(trip, trip_date) + } for trip_date in trip.next_trip_dates(fromdate)] diff --git a/amarillo/app/services/gtfs_constants.py b/amarillo/app/services/gtfs_constants.py new file mode 100644 index 0000000..1e8f3af --- /dev/null +++ b/amarillo/app/services/gtfs_constants.py @@ -0,0 +1,14 @@ +# Constants + +NO_BIKES_ALLOWED = 2 +RIDESHARING_ROUTE_TYPE = 1551 +CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1 +CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2 +STOP_TIMES_STOP_TYPE_REGULARLY = 0 +STOP_TIMES_STOP_TYPE_NONE = 1 +STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2 +STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3 +STOP_TIMES_TIMEPOINT_APPROXIMATE = 0 +STOP_TIMES_TIMEPOINT_EXACT = 1 + +MFDZ_DEFAULT_UNCERTAINITY = 600 \ No newline at end of file diff --git a/amarillo/app/services/gtfsrt/__init__.py b/amarillo/app/services/gtfsrt/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/amarillo/app/services/gtfsrt/gtfs_realtime_pb2.py b/amarillo/app/services/gtfsrt/gtfs_realtime_pb2.py new file mode 100644 index 0000000..4e10463 --- /dev/null +++ b/amarillo/app/services/gtfsrt/gtfs_realtime_pb2.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: gtfs-realtime.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13gtfs-realtime.proto\x12\x10transit_realtime\"y\n\x0b\x46\x65\x65\x64Message\x12,\n\x06header\x18\x01 \x02(\x0b\x32\x1c.transit_realtime.FeedHeader\x12,\n\x06\x65ntity\x18\x02 \x03(\x0b\x32\x1c.transit_realtime.FeedEntity*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xd7\x01\n\nFeedHeader\x12\x1d\n\x15gtfs_realtime_version\x18\x01 \x02(\t\x12Q\n\x0eincrementality\x18\x02 \x01(\x0e\x32+.transit_realtime.FeedHeader.Incrementality:\x0c\x46ULL_DATASET\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\"4\n\x0eIncrementality\x12\x10\n\x0c\x46ULL_DATASET\x10\x00\x12\x10\n\x0c\x44IFFERENTIAL\x10\x01*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xd2\x01\n\nFeedEntity\x12\n\n\x02id\x18\x01 \x02(\t\x12\x19\n\nis_deleted\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x31\n\x0btrip_update\x18\x03 \x01(\x0b\x32\x1c.transit_realtime.TripUpdate\x12\x32\n\x07vehicle\x18\x04 \x01(\x0b\x32!.transit_realtime.VehiclePosition\x12&\n\x05\x61lert\x18\x05 \x01(\x0b\x32\x17.transit_realtime.Alert*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\x82\x08\n\nTripUpdate\x12.\n\x04trip\x18\x01 \x02(\x0b\x32 .transit_realtime.TripDescriptor\x12\x34\n\x07vehicle\x18\x03 \x01(\x0b\x32#.transit_realtime.VehicleDescriptor\x12\x45\n\x10stop_time_update\x18\x02 \x03(\x0b\x32+.transit_realtime.TripUpdate.StopTimeUpdate\x12\x11\n\ttimestamp\x18\x04 \x01(\x04\x12\r\n\x05\x64\x65lay\x18\x05 \x01(\x05\x12\x44\n\x0ftrip_properties\x18\x06 
\x01(\x0b\x32+.transit_realtime.TripUpdate.TripProperties\x1aQ\n\rStopTimeEvent\x12\r\n\x05\x64\x65lay\x18\x01 \x01(\x05\x12\x0c\n\x04time\x18\x02 \x01(\x03\x12\x13\n\x0buncertainty\x18\x03 \x01(\x05*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\x1a\xa0\x04\n\x0eStopTimeUpdate\x12\x15\n\rstop_sequence\x18\x01 \x01(\r\x12\x0f\n\x07stop_id\x18\x04 \x01(\t\x12;\n\x07\x61rrival\x18\x02 \x01(\x0b\x32*.transit_realtime.TripUpdate.StopTimeEvent\x12=\n\tdeparture\x18\x03 \x01(\x0b\x32*.transit_realtime.TripUpdate.StopTimeEvent\x12j\n\x15schedule_relationship\x18\x05 \x01(\x0e\x32@.transit_realtime.TripUpdate.StopTimeUpdate.ScheduleRelationship:\tSCHEDULED\x12\\\n\x14stop_time_properties\x18\x06 \x01(\x0b\x32>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x1a>\n\x12StopTimeProperties\x12\x18\n\x10\x61ssigned_stop_id\x18\x01 \x01(\t*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"P\n\x14ScheduleRelationship\x12\r\n\tSCHEDULED\x10\x00\x12\x0b\n\x07SKIPPED\x10\x01\x12\x0b\n\x07NO_DATA\x10\x02\x12\x0f\n\x0bUNSCHEDULED\x10\x03*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\x1aY\n\x0eTripProperties\x12\x0f\n\x07trip_id\x18\x01 \x01(\t\x12\x12\n\nstart_date\x18\x02 \x01(\t\x12\x12\n\nstart_time\x18\x03 \x01(\t*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xdf\t\n\x0fVehiclePosition\x12.\n\x04trip\x18\x01 \x01(\x0b\x32 .transit_realtime.TripDescriptor\x12\x34\n\x07vehicle\x18\x08 \x01(\x0b\x32#.transit_realtime.VehicleDescriptor\x12,\n\x08position\x18\x02 \x01(\x0b\x32\x1a.transit_realtime.Position\x12\x1d\n\x15\x63urrent_stop_sequence\x18\x03 \x01(\r\x12\x0f\n\x07stop_id\x18\x07 \x01(\t\x12Z\n\x0e\x63urrent_status\x18\x04 \x01(\x0e\x32\x33.transit_realtime.VehiclePosition.VehicleStopStatus:\rIN_TRANSIT_TO\x12\x11\n\ttimestamp\x18\x05 \x01(\x04\x12K\n\x10\x63ongestion_level\x18\x06 
\x01(\x0e\x32\x31.transit_realtime.VehiclePosition.CongestionLevel\x12K\n\x10occupancy_status\x18\t \x01(\x0e\x32\x31.transit_realtime.VehiclePosition.OccupancyStatus\x12\x1c\n\x14occupancy_percentage\x18\n \x01(\r\x12Q\n\x16multi_carriage_details\x18\x0b \x03(\x0b\x32\x31.transit_realtime.VehiclePosition.CarriageDetails\x1a\xd9\x01\n\x0f\x43\x61rriageDetails\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12^\n\x10occupancy_status\x18\x03 \x01(\x0e\x32\x31.transit_realtime.VehiclePosition.OccupancyStatus:\x11NO_DATA_AVAILABLE\x12 \n\x14occupancy_percentage\x18\x04 \x01(\x05:\x02-1\x12\x19\n\x11\x63\x61rriage_sequence\x18\x05 \x01(\r*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"G\n\x11VehicleStopStatus\x12\x0f\n\x0bINCOMING_AT\x10\x00\x12\x0e\n\nSTOPPED_AT\x10\x01\x12\x11\n\rIN_TRANSIT_TO\x10\x02\"}\n\x0f\x43ongestionLevel\x12\x1c\n\x18UNKNOWN_CONGESTION_LEVEL\x10\x00\x12\x14\n\x10RUNNING_SMOOTHLY\x10\x01\x12\x0f\n\x0bSTOP_AND_GO\x10\x02\x12\x0e\n\nCONGESTION\x10\x03\x12\x15\n\x11SEVERE_CONGESTION\x10\x04\"\xd9\x01\n\x0fOccupancyStatus\x12\t\n\x05\x45MPTY\x10\x00\x12\x18\n\x14MANY_SEATS_AVAILABLE\x10\x01\x12\x17\n\x13\x46\x45W_SEATS_AVAILABLE\x10\x02\x12\x16\n\x12STANDING_ROOM_ONLY\x10\x03\x12\x1e\n\x1a\x43RUSHED_STANDING_ROOM_ONLY\x10\x04\x12\x08\n\x04\x46ULL\x10\x05\x12\x1c\n\x18NOT_ACCEPTING_PASSENGERS\x10\x06\x12\x15\n\x11NO_DATA_AVAILABLE\x10\x07\x12\x11\n\rNOT_BOARDABLE\x10\x08*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\x80\t\n\x05\x41lert\x12\x32\n\ractive_period\x18\x01 \x03(\x0b\x32\x1b.transit_realtime.TimeRange\x12\x39\n\x0finformed_entity\x18\x05 \x03(\x0b\x32 .transit_realtime.EntitySelector\x12;\n\x05\x63\x61use\x18\x06 \x01(\x0e\x32\x1d.transit_realtime.Alert.Cause:\rUNKNOWN_CAUSE\x12>\n\x06\x65\x66\x66\x65\x63t\x18\x07 \x01(\x0e\x32\x1e.transit_realtime.Alert.Effect:\x0eUNKNOWN_EFFECT\x12/\n\x03url\x18\x08 \x01(\x0b\x32\".transit_realtime.TranslatedString\x12\x37\n\x0bheader_text\x18\n 
\x01(\x0b\x32\".transit_realtime.TranslatedString\x12<\n\x10\x64\x65scription_text\x18\x0b \x01(\x0b\x32\".transit_realtime.TranslatedString\x12;\n\x0ftts_header_text\x18\x0c \x01(\x0b\x32\".transit_realtime.TranslatedString\x12@\n\x14tts_description_text\x18\r \x01(\x0b\x32\".transit_realtime.TranslatedString\x12O\n\x0eseverity_level\x18\x0e \x01(\x0e\x32%.transit_realtime.Alert.SeverityLevel:\x10UNKNOWN_SEVERITY\"\xd8\x01\n\x05\x43\x61use\x12\x11\n\rUNKNOWN_CAUSE\x10\x01\x12\x0f\n\x0bOTHER_CAUSE\x10\x02\x12\x15\n\x11TECHNICAL_PROBLEM\x10\x03\x12\n\n\x06STRIKE\x10\x04\x12\x11\n\rDEMONSTRATION\x10\x05\x12\x0c\n\x08\x41\x43\x43IDENT\x10\x06\x12\x0b\n\x07HOLIDAY\x10\x07\x12\x0b\n\x07WEATHER\x10\x08\x12\x0f\n\x0bMAINTENANCE\x10\t\x12\x10\n\x0c\x43ONSTRUCTION\x10\n\x12\x13\n\x0fPOLICE_ACTIVITY\x10\x0b\x12\x15\n\x11MEDICAL_EMERGENCY\x10\x0c\"\xdd\x01\n\x06\x45\x66\x66\x65\x63t\x12\x0e\n\nNO_SERVICE\x10\x01\x12\x13\n\x0fREDUCED_SERVICE\x10\x02\x12\x16\n\x12SIGNIFICANT_DELAYS\x10\x03\x12\n\n\x06\x44\x45TOUR\x10\x04\x12\x16\n\x12\x41\x44\x44ITIONAL_SERVICE\x10\x05\x12\x14\n\x10MODIFIED_SERVICE\x10\x06\x12\x10\n\x0cOTHER_EFFECT\x10\x07\x12\x12\n\x0eUNKNOWN_EFFECT\x10\x08\x12\x0e\n\nSTOP_MOVED\x10\t\x12\r\n\tNO_EFFECT\x10\n\x12\x17\n\x13\x41\x43\x43\x45SSIBILITY_ISSUE\x10\x0b\"H\n\rSeverityLevel\x12\x14\n\x10UNKNOWN_SEVERITY\x10\x01\x12\x08\n\x04INFO\x10\x02\x12\x0b\n\x07WARNING\x10\x03\x12\n\n\x06SEVERE\x10\x04*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"7\n\tTimeRange\x12\r\n\x05start\x18\x01 \x01(\x04\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x04*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"q\n\x08Position\x12\x10\n\x08latitude\x18\x01 \x02(\x02\x12\x11\n\tlongitude\x18\x02 \x02(\x02\x12\x0f\n\x07\x62\x65\x61ring\x18\x03 \x01(\x02\x12\x10\n\x08odometer\x18\x04 \x01(\x01\x12\r\n\x05speed\x18\x05 \x01(\x02*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xcd\x02\n\x0eTripDescriptor\x12\x0f\n\x07trip_id\x18\x01 \x01(\t\x12\x10\n\x08route_id\x18\x05 
\x01(\t\x12\x14\n\x0c\x64irection_id\x18\x06 \x01(\r\x12\x12\n\nstart_time\x18\x02 \x01(\t\x12\x12\n\nstart_date\x18\x03 \x01(\t\x12T\n\x15schedule_relationship\x18\x04 \x01(\x0e\x32\x35.transit_realtime.TripDescriptor.ScheduleRelationship\"t\n\x14ScheduleRelationship\x12\r\n\tSCHEDULED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0f\n\x0bUNSCHEDULED\x10\x02\x12\x0c\n\x08\x43\x41NCELED\x10\x03\x12\x13\n\x0bREPLACEMENT\x10\x05\x1a\x02\x08\x01\x12\x0e\n\nDUPLICATED\x10\x06*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"U\n\x11VehicleDescriptor\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12\x15\n\rlicense_plate\x18\x03 \x01(\t*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xb0\x01\n\x0e\x45ntitySelector\x12\x11\n\tagency_id\x18\x01 \x01(\t\x12\x10\n\x08route_id\x18\x02 \x01(\t\x12\x12\n\nroute_type\x18\x03 \x01(\x05\x12.\n\x04trip\x18\x04 \x01(\x0b\x32 .transit_realtime.TripDescriptor\x12\x0f\n\x07stop_id\x18\x05 \x01(\t\x12\x14\n\x0c\x64irection_id\x18\x06 \x01(\r*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N\"\xa6\x01\n\x10TranslatedString\x12\x43\n\x0btranslation\x18\x01 \x03(\x0b\x32..transit_realtime.TranslatedString.Translation\x1a=\n\x0bTranslation\x12\x0c\n\x04text\x18\x01 \x02(\t\x12\x10\n\x08language\x18\x02 \x01(\t*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90N*\x06\x08\xe8\x07\x10\xd0\x0f*\x06\x08\xa8\x46\x10\x90NB\x1d\n\x1b\x63om.google.transit.realtime') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'gtfs_realtime_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\033com.google.transit.realtime' + _TRIPDESCRIPTOR_SCHEDULERELATIONSHIP.values_by_name["REPLACEMENT"]._options = None + _TRIPDESCRIPTOR_SCHEDULERELATIONSHIP.values_by_name["REPLACEMENT"]._serialized_options = b'\010\001' + _FEEDMESSAGE._serialized_start=41 + 
_FEEDMESSAGE._serialized_end=162 + _FEEDHEADER._serialized_start=165 + _FEEDHEADER._serialized_end=380 + _FEEDHEADER_INCREMENTALITY._serialized_start=312 + _FEEDHEADER_INCREMENTALITY._serialized_end=364 + _FEEDENTITY._serialized_start=383 + _FEEDENTITY._serialized_end=593 + _TRIPUPDATE._serialized_start=596 + _TRIPUPDATE._serialized_end=1622 + _TRIPUPDATE_STOPTIMEEVENT._serialized_start=887 + _TRIPUPDATE_STOPTIMEEVENT._serialized_end=968 + _TRIPUPDATE_STOPTIMEUPDATE._serialized_start=971 + _TRIPUPDATE_STOPTIMEUPDATE._serialized_end=1515 + _TRIPUPDATE_STOPTIMEUPDATE_STOPTIMEPROPERTIES._serialized_start=1355 + _TRIPUPDATE_STOPTIMEUPDATE_STOPTIMEPROPERTIES._serialized_end=1417 + _TRIPUPDATE_STOPTIMEUPDATE_SCHEDULERELATIONSHIP._serialized_start=1419 + _TRIPUPDATE_STOPTIMEUPDATE_SCHEDULERELATIONSHIP._serialized_end=1499 + _TRIPUPDATE_TRIPPROPERTIES._serialized_start=1517 + _TRIPUPDATE_TRIPPROPERTIES._serialized_end=1606 + _VEHICLEPOSITION._serialized_start=1625 + _VEHICLEPOSITION._serialized_end=2872 + _VEHICLEPOSITION_CARRIAGEDETAILS._serialized_start=2219 + _VEHICLEPOSITION_CARRIAGEDETAILS._serialized_end=2436 + _VEHICLEPOSITION_VEHICLESTOPSTATUS._serialized_start=2438 + _VEHICLEPOSITION_VEHICLESTOPSTATUS._serialized_end=2509 + _VEHICLEPOSITION_CONGESTIONLEVEL._serialized_start=2511 + _VEHICLEPOSITION_CONGESTIONLEVEL._serialized_end=2636 + _VEHICLEPOSITION_OCCUPANCYSTATUS._serialized_start=2639 + _VEHICLEPOSITION_OCCUPANCYSTATUS._serialized_end=2856 + _ALERT._serialized_start=2875 + _ALERT._serialized_end=4027 + _ALERT_CAUSE._serialized_start=3497 + _ALERT_CAUSE._serialized_end=3713 + _ALERT_EFFECT._serialized_start=3716 + _ALERT_EFFECT._serialized_end=3937 + _ALERT_SEVERITYLEVEL._serialized_start=3939 + _ALERT_SEVERITYLEVEL._serialized_end=4011 + _TIMERANGE._serialized_start=4029 + _TIMERANGE._serialized_end=4084 + _POSITION._serialized_start=4086 + _POSITION._serialized_end=4199 + _TRIPDESCRIPTOR._serialized_start=4202 + _TRIPDESCRIPTOR._serialized_end=4535 + 
_TRIPDESCRIPTOR_SCHEDULERELATIONSHIP._serialized_start=4403 + _TRIPDESCRIPTOR_SCHEDULERELATIONSHIP._serialized_end=4519 + _VEHICLEDESCRIPTOR._serialized_start=4537 + _VEHICLEDESCRIPTOR._serialized_end=4622 + _ENTITYSELECTOR._serialized_start=4625 + _ENTITYSELECTOR._serialized_end=4801 + _TRANSLATEDSTRING._serialized_start=4804 + _TRANSLATEDSTRING._serialized_end=4970 + _TRANSLATEDSTRING_TRANSLATION._serialized_start=4893 + _TRANSLATEDSTRING_TRANSLATION._serialized_end=4954 +# @@protoc_insertion_point(module_scope) diff --git a/amarillo/app/services/gtfsrt/realtime_extension_pb2.py b/amarillo/app/services/gtfsrt/realtime_extension_pb2.py new file mode 100644 index 0000000..d6f5f7e --- /dev/null +++ b/amarillo/app/services/gtfsrt/realtime_extension_pb2.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: realtime_extension.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import app.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 
class RegionService:
    """In-memory registry of regions, loaded once from conf/region/*.json."""

    def __init__(self):
        """Load every region definition found under conf/region/ into a dict keyed by region id."""
        self.regions: Dict[str, Region] = {}

        for region_file_name in glob('conf/region/*.json'):
            with open(region_file_name) as region_file:
                # 'region_dict' — the original shadowed the builtin 'dict'
                region_dict = json.load(region_file)
                region = Region(**region_dict)
                self.regions[region.id] = region

    def get_region(self, region_id: str) -> "Region":
        """Return the region with the given id, or None if unknown."""
        return self.regions.get(region_id)
class RoutingException(Exception):
    """Raised when the routing backend cannot provide directions."""

    def __init__(self, message):
        # Delegate to Exception so the message renders like any other exception
        super().__init__(message)


class RoutingService:
    """Thin HTTP client for a GraphHopper routing endpoint."""

    def __init__(self, gh_url='https://api.mfdz.de/gh'):
        # Base URL of the GraphHopper service (no trailing slash expected)
        self.gh_service_url = gh_url

    def path_for_stops(self, points):
        """Return the first GraphHopper path traversing the given points, or {} if none found."""
        directions = self._get_directions(points)
        if directions and len(directions.get("paths")) > 0:
            return directions.get("paths")[0]
        return {}

    def _get_directions(self, points):
        """GET directions for the given points.

        Raises:
            RoutingException: for any non-200 response; carries the backend's
                'message' when the error body is JSON, else the status code.
        """
        req_url = self._create_url(points, True, True)
        logger.debug("Get directions via: %s", req_url)
        response = requests.get(req_url)
        status = response.status_code
        if status == 200:
            # Found route between points
            return response.json()
        try:
            message = response.json().get('message')
        except ValueError as err:
            # Error body was not JSON — report the status code instead of a
            # bare 'except:' silently swallowing everything (previous behavior)
            raise RoutingException("Get directions failed with status code {}".format(status)) from err
        raise RoutingException(message)

    def _create_url(self, points, calc_points=False, instructions=False):
        """Build the GraphHopper /route request URL for the given points (point.x=lon, point.y=lat)."""
        locations = "".join("point={0}%2C{1}&".format(point.y, point.x) for point in points)
        return "{0}/route?{1}instructions={2}&calc_points={3}&points_encoded=false".format(
            self.gh_service_url, locations, instructions, calc_points)
class StopsStore:
    """Registry of potential carpool stops loaded from CSV/GeoJSON sources.

    Stops are kept as GeoDataFrames in a metric internal projection so that
    vicinity and distance queries along a trip geometry are cheap.
    """

    def __init__(self, stop_sources=None, internal_projection="EPSG:32632"):
        # BUG FIX: stop_sources previously defaulted to a shared mutable list ([]).
        self.internal_projection = internal_projection
        # Transformer from WGS84 lon/lat to the metric internal projection
        self.projection = Transformer.from_crs("EPSG:4326", internal_projection, always_xy=True).transform
        self.stopsDataFrames = []
        self.stop_sources = stop_sources if stop_sources is not None else []

    def load_stop_sources(self):
        """Imports stops from stop_sources and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m.

        Subsequent calls of load_stop_sources will reload all stop_sources
        but replace the current stops only if all stops could be loaded successfully.
        """
        stopsDataFrames = []
        error_occurred = False

        for stops_source in self.stop_sources:
            try:
                stopsDataFrame = self._load_stops(stops_source["url"])
                stopsDataFrames.append({'distanceInMeter': stops_source["vicinity"],
                                        'stops': stopsDataFrame})
            except Exception:
                error_occurred = True
                logger.error("Failed to load stops from %s to StopsStore.", stops_source["url"], exc_info=True)

        # Only swap in the new data when every source loaded cleanly
        if not error_occurred:
            self.stopsDataFrames = stopsDataFrames

    def find_additional_stops_around(self, line, stops=None):
        """Return a GeoDataFrame with all stops in the vicinity of the given
        line, sorted by distance from the origin of the line.

        Note: for internal projection/distance calculations, the lat/lon
        geometries of line and stops are converted to the internal projection.
        """
        stops_frames = []
        if stops:
            stops_frames.append(self._convert_to_dataframe(stops))
        transformedLine = transform(self.projection, LineString(line.coordinates))
        for stops_to_match in self.stopsDataFrames:
            stops_frames.append(self._find_stops_around_transformed(stops_to_match['stops'], transformedLine, stops_to_match['distanceInMeter']))
        stops = gpd.GeoDataFrame(pd.concat(stops_frames, ignore_index=True, sort=True))
        if not stops.empty:
            self._sort_by_distance(stops, transformedLine)
        return stops

    def find_closest_stop(self, carpool_stop, max_search_distance):
        """Return the closest registered stop within max_search_distance meters,
        or carpool_stop itself when no registered stop is close enough."""
        transformedCoord = Point(self.projection(carpool_stop.lon, carpool_stop.lat))
        best_dist = max_search_distance + 1
        best_stop = None
        for stops_with_dist in self.stopsDataFrames:
            stops = stops_with_dist['stops']
            s, d = stops.sindex.nearest(transformedCoord, return_all=True, return_distance=True, max_distance=max_search_distance)
            if len(d) > 0 and d[0] < best_dist:
                best_dist = d[0]
                row = s[1][0]
                best_stop = StopTime(name=stops.at[row, 'stop_name'], lat=stops.at[row, 'y'], lon=stops.at[row, 'x'])

        return best_stop if best_stop else carpool_stop

    def _normalize_stop_name(self, stop_name):
        """Map empty/generic park&ride names to a canonical 'P+R' spelling."""
        default_name = 'P+R-Parkplatz'
        if stop_name in ('', 'Park&Ride'):
            return default_name
        return re.sub(r"P(ark)?\s?[\+&]\s?R(ail|ide)?", 'P+R', stop_name)

    def _load_stops(self, source: str):
        """Load stops from a URL (GeoJSON or CSV) or from a local CSV file
        and return them as a projected GeoDataFrame."""
        logger.info("Load stops from %s", source)
        if source.startswith('http'):
            if source.endswith('json'):
                with requests.get(source) as json_source:
                    stopsDataFrame = self._load_stops_geojson(json_source.json())
            else:
                with requests.get(source) as csv_source:
                    stopsDataFrame = self._load_stops_csv(codecs.iterdecode(csv_source.iter_lines(), 'utf-8'))
        else:
            with open(source, encoding='utf-8') as csv_source:
                stopsDataFrame = self._load_stops_csv(csv_source)

        return stopsDataFrame

    def _load_stops_csv(self, csv_source):
        """Parse a ';'-separated stops CSV (stop_id;stop_lat;stop_lon;stop_name)."""
        stop_ids = []
        lats = []
        lons = []
        stop_names = []
        reader = csv.DictReader(csv_source, delimiter=';')
        for row in reader:
            # Coordinates may use a decimal comma in the source files
            lats.append(float(row['stop_lat'].replace(",", ".")))
            lons.append(float(row['stop_lon'].replace(",", ".")))
            stop_names.append(self._normalize_stop_name(row['stop_name']))
            stop_ids.append(row['stop_id'])

        return self._as_dataframe(stop_ids, lats, lons, stop_names)

    def _load_stops_geojson(self, geojson_source):
        """Parse a GeoJSON FeatureCollection of point stops; features without
        coordinates or a name are skipped with an error log."""
        stop_ids = []
        lats = []
        lons = []
        stop_names = []
        for row in geojson_source['features']:
            coord = row['geometry']['coordinates']
            if not coord or not row['properties'].get('name'):
                logger.error('Stop feature {} has null coord or name'.format(row['id']))
                continue
            lats.append(coord[1])
            lons.append(coord[0])
            stop_names.append(self._normalize_stop_name(row['properties']['name']))
            stop_ids.append(row['id'])

        return self._as_dataframe(stop_ids, lats, lons, stop_names)

    def _as_dataframe(self, stop_ids, lats, lons, stop_names):
        """Build a GeoDataFrame (columns x, y, stop_name, id) projected into
        the internal metric CRS."""
        df = gpd.GeoDataFrame(data={'x': lons, 'y': lats,
                                    'stop_name': stop_names, 'id': stop_ids})
        stopsGeoDataFrame = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.x, df.y, crs='EPSG:4326'))
        stopsGeoDataFrame.to_crs(crs=self.internal_projection, inplace=True)
        return stopsGeoDataFrame

    def _find_stops_around_transformed(self, stopsDataFrame, transformedLine, distance):
        """Return the stops intersecting a buffer of 'distance' meters around the line."""
        bufferedLine = transformedLine.buffer(distance)
        sindex = stopsDataFrame.sindex
        # Coarse filter via the spatial index, then exact geometric check
        possible_matches_index = list(sindex.intersection(bufferedLine.bounds))
        possible_matches = stopsDataFrame.iloc[possible_matches_index]
        return possible_matches[possible_matches.intersects(bufferedLine)]

    def _convert_to_dataframe(self, stops):
        """Convert StopTime-like objects to a GeoDataFrame in the internal CRS."""
        return gpd.GeoDataFrame([[stop.name, stop.lon, stop.lat,
                                  stop.id, Point(self.projection(stop.lon, stop.lat))] for stop in stops],
                                columns=['stop_name', 'x', 'y', 'id', 'geometry'], crs=self.internal_projection)

    def _sort_by_distance(self, stops, transformedLine):
        """Add a 'distance' column (projection along the line) and sort in place by it."""
        stops['distance'] = stops.apply(lambda row: transformedLine.project(row['geometry']), axis=1)
        stops.sort_values('distance', inplace=True)


def is_carpooling_stop(stop_id, name):
    """True if this stop is explicitly or heuristically a carpooling stop."""
    stop_name = name.lower()
    # mfdz: or bbnavi: prefixed stops are custom stops which are explicitly meant to be carpooling stops
    return stop_id.startswith('mfdz:') or stop_id.startswith('bbnavi:') or 'mitfahr' in stop_name or 'p&m' in stop_name
class Trip:
    """A carpool trip in GTFS-ready form: either recurring on a set of
    weekdays or running once on a concrete date."""

    def __init__(self, trip_id, route_name, headsign, url, calendar, departureTime, path, agency, lastUpdated, stop_times, bbox):
        # 'calendar' is either a set of Weekday values (recurring trip)
        # or a concrete date (one-off trip).
        if isinstance(calendar, set):
            self.runs_regularly = True
            self.weekdays = [
                1 if Weekday.monday in calendar else 0,
                1 if Weekday.tuesday in calendar else 0,
                1 if Weekday.wednesday in calendar else 0,
                1 if Weekday.thursday in calendar else 0,
                1 if Weekday.friday in calendar else 0,
                1 if Weekday.saturday in calendar else 0,
                1 if Weekday.sunday in calendar else 0,
            ]
        else:
            self.start = datetime.combine(calendar, departureTime)
            self.runs_regularly = False
            self.weekdays = [0, 0, 0, 0, 0, 0, 0]

        self.start_time = departureTime
        self.path = path
        self.trip_id = trip_id
        self.url = url
        self.agency = agency
        self.stops = []
        self.lastUpdated = lastUpdated
        self.stop_times = stop_times
        self.bbox = bbox
        self.route_name = route_name
        self.trip_headsign = headsign

    def path_as_line_string(self):
        # BUG FIX: previously returned the undefined global name 'path'
        return self.path

    def _total_seconds(self, instant):
        """Seconds since midnight for a datetime.time value."""
        return instant.hour * 3600 + instant.minute * 60 + instant.second

    def start_time_str(self):
        """Departure time formatted as HH:MM:SS."""
        return self.start_time.strftime("%H:%M:%S")

    def next_trip_dates(self, start_date, day_count=14):
        """Yield the YYYYMMDD service dates within the next day_count days
        (recurring trips), or the single start date (one-off trips)."""
        if self.runs_regularly:
            for single_date in (start_date + timedelta(n) for n in range(day_count)):
                if self.weekdays[single_date.weekday()] == 1:
                    yield single_date.strftime("%Y%m%d")
        else:
            yield self.start.strftime("%Y%m%d")

    def route_long_name(self):
        return self.route_name

    def intersects(self, bbox):
        """True if this trip's bounding box intersects the given (minx, miny, maxx, maxy) box."""
        return self.bbox.intersects(box(*bbox))
class TripStore:
    """
    TripStore maintains the currently valid trips. A trip is a
    carpool offer enhanced with all stops along its route.

    Attributes:
        trips          Dict of currently valid trips.
        deleted_trips  Dict of recently deleted trips.
        recent_trips   Dict of trips added/updated within the last day.
    """

    def __init__(self, stops_store):
        self.transformer = TripTransformer(stops_store)
        self.stops_store = stops_store
        self.trips = {}
        self.deleted_trips = {}
        self.recent_trips = {}

    def put_carpool(self, carpool: "Carpool"):
        """
        Adds carpool to the TripStore: reuses the stored enhanced version when
        it is up to date, otherwise enhances and persists it. Returns the
        resulting Trip, or None when enhancement failed.
        """
        filename = f'data/enhanced/{carpool.agency}/{carpool.id}.json'
        try:
            existing_carpool = self._load_carpool_if_exists(carpool.agency, carpool.id)
            if existing_carpool and existing_carpool.lastUpdated == carpool.lastUpdated:
                enhanced_carpool = existing_carpool
            else:
                if len(carpool.stops) < 2 or self.distance_in_m(carpool) < 1000:
                    logger.warning("Failed to add carpool %s:%s to TripStore, distance too low", carpool.agency, carpool.id)
                    self.handle_failed_carpool_enhancement(carpool)
                    return
                enhanced_carpool = self.transformer.enhance_carpool(carpool)
                # TODO should only store enhanced_carpool, if it has 2 or more stops
                assert_folder_exists(f'data/enhanced/{carpool.agency}/')
                with open(filename, 'w', encoding='utf-8') as f:
                    f.write(enhanced_carpool.json())
                logger.info("Added enhanced carpool %s:%s", carpool.agency, carpool.id)

            return self._load_as_trip(enhanced_carpool)
        except RoutingException as err:
            logger.warning("Failed to add carpool %s:%s to TripStore due to RoutingException %s", carpool.agency, carpool.id, getattr(err, 'message', repr(err)))
            self.handle_failed_carpool_enhancement(carpool)
        except Exception:
            logger.error("Failed to add carpool %s:%s to TripStore.", carpool.agency, carpool.id, exc_info=True)
            self.handle_failed_carpool_enhancement(carpool)

    def handle_failed_carpool_enhancement(self, carpool: "Carpool"):
        """Persist a carpool that could not be enhanced under data/failed/ for inspection."""
        # BUG FIX: first parameter was misspelled 'sellf'
        assert_folder_exists(f'data/failed/{carpool.agency}/')
        with open(f'data/failed/{carpool.agency}/{carpool.id}.json', 'w', encoding='utf-8') as f:
            f.write(carpool.json())

    def distance_in_m(self, carpool):
        """Geodesic distance between first and last stop; 0 for < 2 stops."""
        if len(carpool.stops) < 2:
            return 0
        s1 = carpool.stops[0]
        s2 = carpool.stops[-1]
        return geodesic_distance_in_m((s1.lon, s1.lat), (s2.lon, s2.lat))

    def recently_added_trips(self):
        return list(self.recent_trips.values())

    def recently_deleted_trips(self):
        return list(self.deleted_trips.values())

    def _load_carpool_if_exists(self, agency_id: str, carpool_id: str):
        """Return the stored enhanced carpool, or None if missing or unreadable."""
        if carpool_exists(agency_id, carpool_id, 'data/enhanced'):
            try:
                return load_carpool(agency_id, carpool_id, 'data/enhanced')
            except Exception as e:
                # An error on restore could be caused by model changes,
                # in such a case, it needs to be recreated
                logger.warning("Could not restore enhanced trip %s:%s, reason: %s", agency_id, carpool_id, repr(e))

        return None

    def _load_as_trip(self, carpool: "Carpool"):
        """Transform the (enhanced) carpool to a Trip and register it."""
        trip = self.transformer.transform_to_trip(carpool)
        trip_id = trip.trip_id
        self.trips[trip_id] = trip
        if not is_older_than_days(carpool.lastUpdated, 1):
            self.recent_trips[trip_id] = trip
        logger.debug("Added trip %s", trip_id)

        return trip

    def delete_carpool(self, agency_id: str, carpool_id: str):
        """
        Deletes carpool from the TripStore (and from disk), remembering it in
        deleted_trips so a deletion update can still be published.
        """
        agencyScopedCarpoolId = f"{agency_id}:{carpool_id}"
        trip_to_be_deleted = self.trips.get(agencyScopedCarpoolId)
        if trip_to_be_deleted:
            self.deleted_trips[agencyScopedCarpoolId] = trip_to_be_deleted
            del self.trips[agencyScopedCarpoolId]

        if self.recent_trips.get(agencyScopedCarpoolId):
            del self.recent_trips[agencyScopedCarpoolId]

        if carpool_exists(agency_id, carpool_id):
            remove_carpool_file(agency_id, carpool_id)

        # BUG FIX: previously logged the builtin 'id' instead of the trip id
        logger.debug("Deleted trip %s", agencyScopedCarpoolId)

    def unflag_unrecent_updates(self):
        """
        Trips that were last updated before yesterday, are not recent
        any longer. As no updates need to be sent for them any longer,
        they will be removed from recent_trips and deleted_trips.
        """
        for key in list(self.recent_trips):
            t = self.recent_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.recent_trips[key]

        for key in list(self.deleted_trips):
            t = self.deleted_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.deleted_trips[key]
As no updates need to be sent for them any longer, + they will be removed from recent recent_trips and deleted_trips. + """ + for key in list(self.recent_trips): + t = self.recent_trips.get(key) + if t and t.lastUpdated.date() < yesterday(): + del self.recent_trips[key] + + for key in list(self.deleted_trips): + t = self.deleted_trips.get(key) + if t and t.lastUpdated.date() < yesterday(): + del self.deleted_trips[key] + + +class TripTransformer: + REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS = True + REPLACEMENT_STOPS_SERACH_RADIUS_IN_M = 1000 + SIMPLIFY_TOLERANCE = 0.0001 + + router = RoutingService() + + def __init__(self, stops_store): + self.stops_store = stops_store + + def transform_to_trip(self, carpool): + stop_times = self._convert_stop_times(carpool) + route_name = carpool.stops[0].name + " nach " + carpool.stops[-1].name + headsign= carpool.stops[-1].name + trip_id = self._trip_id(carpool) + path = carpool.path + bbox = box( + min([pt[0] for pt in path.coordinates]), + min([pt[1] for pt in path.coordinates]), + max([pt[0] for pt in path.coordinates]), + max([pt[1] for pt in path.coordinates])) + + trip = Trip(trip_id, route_name, headsign, str(carpool.deeplink), carpool.departureDate, carpool.departureTime, carpool.path, carpool.agency, carpool.lastUpdated, stop_times, bbox) + + return trip + + def _trip_id(self, carpool): + return f"{carpool.agency}:{carpool.id}" + + def _replace_stops_by_transit_stops(self, carpool, max_search_distance): + new_stops = [] + for carpool_stop in carpool.stops: + new_stops.append(self.stops_store.find_closest_stop(carpool_stop, max_search_distance)) + return new_stops + + def enhance_carpool(self, carpool): + if self.REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS: + carpool.stops = self._replace_stops_by_transit_stops(carpool, self.REPLACEMENT_STOPS_SERACH_RADIUS_IN_M) + + path = self._path_for_ride(carpool) + lineString_shapely_wgs84 = LineString(coordinates = path["points"]["coordinates"]).simplify(0.0001) + 
lineString_wgs84 = GeoJSONLineString(type="LineString", coordinates=list(lineString_shapely_wgs84.coords)) + virtual_stops = self.stops_store.find_additional_stops_around(lineString_wgs84, carpool.stops) + if not virtual_stops.empty: + virtual_stops["time"] = self._estimate_times(path, virtual_stops['distance']) + logger.debug("Virtual stops found: {}".format(virtual_stops)) + if len(virtual_stops) > MAX_STOPS_PER_TRIP: + # in case we found more than MAX_STOPS_PER_TRIP, we retain first and last + # half of MAX_STOPS_PER_TRIP + virtual_stops = virtual_stops.iloc[np.r_[0:int(MAX_STOPS_PER_TRIP/2), int(MAX_STOPS_PER_TRIP/2):]] + + trip_id = f"{carpool.agency}:{carpool.id}" + stop_times = self._stops_and_stop_times(carpool.departureTime, trip_id, virtual_stops) + + enhanced_carpool = carpool.copy() + enhanced_carpool.stops = stop_times + enhanced_carpool.path = lineString_wgs84 + return enhanced_carpool + + def _convert_stop_times(self, carpool): + + stop_times = [GtfsStopTime( + self._trip_id(carpool), + stop.arrivalTime, + stop.departureTime, + stop.id, + seq_nr+1, + STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_dropoff else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER, + STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_pickup else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER, + STOP_TIMES_TIMEPOINT_APPROXIMATE) + for seq_nr, stop in enumerate(carpool.stops)] + return stop_times + + def _path_for_ride(self, carpool): + points = self._stop_coords(carpool.stops) + return self.router.path_for_stops(points) + + def _stop_coords(self, stops): + # Retrieve coordinates of all officially announced stops (start, intermediate, target) + return [Point(stop.lon, stop.lat) for stop in stops] + + def _estimate_times(self, path, distances_from_start): + cumulated_distance = 0 + cumulated_time = 0 + stop_times = [] + instructions = path["instructions"] + + cnt = 0 + instr_distance = instructions[cnt]["distance"] + instr_time = 
instructions[cnt]["time"] + + for distance in distances_from_start: + while cnt < len(instructions) and cumulated_distance + instructions[cnt]["distance"] < distance: + cumulated_distance = cumulated_distance + instructions[cnt]["distance"] + cumulated_time = cumulated_time + instructions[cnt]["time"] + cnt = cnt + 1 + + if cnt < len(instructions): + if instructions[cnt]["distance"] ==0: + raise RoutingException("Origin and destinaction too close") + percent_dist = (distance - cumulated_distance) / instructions[cnt]["distance"] + stop_time = cumulated_time + percent_dist * instructions[cnt]["time"] + stop_times.append(stop_time) + else: + logger.debug("distance {} exceeds total length {}, using max arrival time {}".format(distance, cumulated_distance, cumulated_time)) + stop_times.append(cumulated_time) + return stop_times + + def _stops_and_stop_times(self, start_time, trip_id, stops_frame): + # Assumptions: + # arrival_time = departure_time + # pickup_type, drop_off_type for origin: = coordinate/none + # pickup_type, drop_off_type for destination: = none/coordinate + # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners) + number_of_stops = len(stops_frame.index) + total_distance = stops_frame.iloc[number_of_stops-1]["distance"] + + first_stop_time = GtfsTimeDelta(hours = start_time.hour, minutes = start_time.minute, seconds = start_time.second) + stop_times = [] + seq_nr = 0 + for i in range(0, number_of_stops): + current_stop = stops_frame.iloc[i] + + if not current_stop.id: + continue + elif i == 0: + if (stops_frame.iloc[1].time-current_stop.time) < 1000: + # skip custom stop if there is an official stop very close by + logger.debug("Skipped stop %s", current_stop.id) + continue + else: + if (current_stop.time-stops_frame.iloc[i-1].time) < 5000 and not i==1 and not is_carpooling_stop(current_stop.id, current_stop.stop_name): + # skip latter stop if it's very close (<5 seconds drive) by the preceding 
def load_carpool(agency_id: str, carpool_id: str, folder: str = 'data/enhanced') -> "Carpool":
    """Read the serialized carpool JSON for the given agency/carpool id and deserialize it."""
    with open(f'{folder}/{agency_id}/{carpool_id}.json', 'r', encoding='utf-8') as f:
        # 'carpool_dict' — the original shadowed the builtin 'dict'
        carpool_dict = json.load(f)
    return Carpool(**carpool_dict)


def carpool_exists(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """True if a serialized carpool file exists for the given ids."""
    return os.path.exists(f"{folder}/{agency_id}/{carpool_id}.json")


def remove_carpool_file(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Delete the serialized carpool file for the given ids."""
    return os.remove(f"{folder}/{agency_id}/{carpool_id}.json")