first commit

This commit is contained in:
Csaba 2024-05-03 12:49:45 +02:00
parent abd9ff1fe7
commit 5b1c9cb80e
17 changed files with 1577 additions and 0 deletions

169
.gitignore vendored Normal file
View file

@ -0,0 +1,169 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
data/
secrets
logging.conf
config
static/**
templates/**
conf/**

39
.vscode/launch.json vendored Normal file
View file

@ -0,0 +1,39 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
// {
// "name": "Debug Tests",
// "type": "debugpy",
// "request": "launch",
// "purpose": ["debug-test"],
// "module": "pytest",
// "console": "integratedTerminal",
// "justMyCode": true,
// "env": {
// "_PYTEST_RAISE": "1"
// },
// },
{
"name": "Python: FastAPI",
"type": "debugpy",
"request": "launch",
"module": "uvicorn",
"args": [
"amarillo-gtfs-generator.gtfs_generator:app",
"--workers=1",
"--port=8002"
],
// "preLaunchTask": "enhance",
"jinja": true,
"justMyCode": false,
"env": {
"admin_token": "supersecret",
"ride2go_token": "supersecret2"
}
}
]
}

View file

@ -0,0 +1 @@
from .gtfs_generator import setup

View file

@ -0,0 +1,137 @@
import amarillo.plugins.gtfs_export.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2
import amarillo.plugins.gtfs_export.gtfsrt.realtime_extension_pb2 as mfdzrte
from amarillo.plugins.gtfs_export.gtfs_constants import *
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict
from datetime import datetime, timedelta
import json
import re
import time
class GtfsRtProducer():
    """Produces GTFS-RT TripUpdate feeds from a trip store's recently
    added and recently deleted trips.

    See https://developers.google.com/transit/gtfs-realtime/reference
    """

    def __init__(self, trip_store):
        # trip_store must provide recently_added_trips() and
        # recently_deleted_trips() (see _get_added/_get_deleted).
        self.trip_store = trip_store

    def generate_feed(self, time, format='protobuf', bbox=None):
        """Build a GTFS-RT feed for the given POSIX timestamp.

        format: 'message' returns the FeedMessage object, 'json' a dict,
        anything else (default 'protobuf') the serialized bytes.
        bbox optionally restricts the feed to intersecting trips.
        NOTE: parameters `time` and `format` shadow the module/builtin of
        the same names; kept for interface compatibility.
        """
        # See https://developers.google.com/transit/gtfs-realtime/reference
        # https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
        gtfsrt_dict = {
            'header': {
                'gtfsRealtimeVersion': '1.0',
                'timestamp': int(time)
            },
            'entity': self._get_trip_updates(bbox)
        }
        feed = gtfs_realtime_pb2.FeedMessage()
        ParseDict(gtfsrt_dict, feed)
        if "message" == format.lower():
            return feed
        elif "json" == format.lower():
            return MessageToDict(feed)
        else:
            return feed.SerializeToString()

    def export_feed(self, timestamp, file_path, bbox=None):
        """
        Exports gtfs-rt feed as .json and .pbf file to file_path
        """
        feed = self.generate_feed(timestamp, "message", bbox)
        with open(f"{file_path}.pbf", "wb") as f:
            f.write(feed.SerializeToString())
        with open(f"{file_path}.json", "w") as f:
            json.dump(MessageToDict(feed), f)

    def _get_trip_updates(self, bbox=None):
        """Collect added and deleted trips as GTFS-RT entity dicts."""
        trips = []
        trips.extend(self._get_added(bbox))
        trips.extend(self._get_deleted(bbox))
        trip_updates = []
        for num, trip in enumerate(trips):
            trip_updates.append({
                'id': f'carpool-update-{num}',
                'tripUpdate': trip
            })
        return trip_updates

    def _get_deleted(self, bbox=None):
        """Return CANCELED updates for recently deleted trips."""
        return self._get_updates(
            self.trip_store.recently_deleted_trips(),
            self._as_delete_updates,
            bbox)

    def _get_added(self, bbox=None):
        """Return ADDED updates for recently added trips."""
        return self._get_updates(
            self.trip_store.recently_added_trips(),
            self._as_added_updates,
            bbox)

    def _get_updates(self, trips, update_func, bbox=None):
        """Apply update_func to every trip (optionally filtered by bbox)."""
        updates = []
        today = datetime.today()
        for t in trips:
            # Fixed: identity comparison with None (was `bbox == None`).
            if bbox is None or t.intersects(bbox):
                updates.extend(update_func(t, today))
        return updates

    def _as_delete_updates(self, trip, fromdate):
        """One CANCELED TripUpdate dict per upcoming service date."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'CANCELED',
                'routeId': trip.trip_id
            }
        } for trip_date in trip.next_trip_dates(fromdate)]

    def _to_seconds(self, fromdate, stop_time):
        """Convert a YYYYMMDD date plus HH:MM:SS stop time to a POSIX timestamp
        (local time, supports hours >= 24 via timedelta arithmetic)."""
        startdate = datetime.strptime(fromdate, '%Y%m%d')
        m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
        delta = timedelta(
            hours=int(m.group(1)),
            minutes=int(m.group(2)),
            seconds=int(m.group(3)))
        return time.mktime((startdate + delta).timetuple())

    def _to_stop_times(self, trip, fromdate):
        """Build stopTimeUpdate dicts (with MFDZ extensions) for all stops of trip."""
        return [{
            'stopSequence': stoptime.stop_sequence,
            'arrival': {
                'time': self._to_seconds(fromdate, stoptime.arrival_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'departure': {
                'time': self._to_seconds(fromdate, stoptime.departure_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'stopId': stoptime.stop_id,
            'scheduleRelationship': 'SCHEDULED',
            'stop_time_properties': {
                '[transit_realtime.stop_time_properties]': {
                    'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
                    'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
                }
            }
        }
            for stoptime in trip.stop_times]

    def _as_added_updates(self, trip, fromdate):
        """One ADDED TripUpdate dict (with MFDZ trip descriptor extension)
        per upcoming service date."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'ADDED',
                'routeId': trip.trip_id,
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl': trip.url,
                    'agencyId': trip.agency,
                    'route_long_name': trip.route_long_name(),
                    'route_type': RIDESHARING_ROUTE_TYPE
                }
            },
            'stopTimeUpdate': self._to_stop_times(trip, trip_date)
        } for trip_date in trip.next_trip_dates(fromdate)]

View file

@ -0,0 +1,14 @@
# Constants
# GTFS trips.txt bikes_allowed value: 2 = no bikes allowed.
NO_BIKES_ALLOWED = 2
# Extended GTFS route_type used for ridesharing/carpool routes.
RIDESHARING_ROUTE_TYPE = 1551
# GTFS calendar_dates.txt exception_type values: 1 = service added, 2 = service removed.
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
# GTFS stop_times.txt pickup_type / drop_off_type values.
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
# GTFS stop_times.txt timepoint values: 0 = approximate, 1 = exact.
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1
# Default uncertainty (seconds) attached to GTFS-RT arrival/departure events.
# NOTE(review): name misspells "UNCERTAINTY"; kept as-is for compatibility.
MFDZ_DEFAULT_UNCERTAINITY = 600

View file

@ -0,0 +1,230 @@
from collections.abc import Iterable
from datetime import datetime, timedelta
from zipfile import ZipFile
import csv
import gettext
import logging
import re
from amarillo.utils.utils import assert_folder_exists
from amarillo.plugins.gtfs_export.models.gtfs import GtfsTimeDelta, GtfsFeedInfo, GtfsAgency, GtfsRoute, GtfsStop, GtfsStopTime, GtfsTrip, GtfsCalendar, GtfsCalendarDate, GtfsShape
from amarillo.plugins.enhancer.services.stops import is_carpooling_stop
from amarillo.plugins.gtfs_export.gtfs_constants import *
from .models.Carpool import Agency
logger = logging.getLogger(__name__)
class GtfsExport:
    """Exports carpool trips and well-known stops as a static GTFS feed.

    Collects agencies, routes, trips, calendar(+dates), stops, stop_times
    and shapes in memory, writes them as GTFS .txt files into a folder and
    zips them up.
    """

    def __init__(self, agencies: dict[str, Agency], feed_info, ridestore, stopstore, bbox=None):
        """
        agencies: mapping of agency id -> Agency model.
        feed_info: a GtfsFeedInfo record written to feed_info.txt.
        ridestore: provides the trips to export (.trips mapping).
        stopstore: provides well-known stops (.stopsDataFrames).
        bbox: optional [minLon, minLat, maxLon, maxLat] filter.
        """
        # Counters and caches are instance state. (They were previously class
        # attributes; the mutable stored_stops dict would then be shared by
        # every GtfsExport instance in the process — a latent bug.)
        self.stops_counter = 0
        self.trips_counter = 0
        self.routes_counter = 0
        self.stored_stops = {}  # all known stops, keyed by stop_id
        self.stops = {}         # stops that will actually be exported
        self.routes = []
        self.calendar_dates = []
        self.calendar = []      # (was assigned twice in the original __init__)
        self.trips = []
        self.stop_times = []
        self.shapes = []
        self.agencies = [GtfsAgency(a.id, a.name, a.url, a.timezone, a.lang, a.email) for a in agencies.values()]
        self.feed_info = feed_info
        self.localized_to = " nach "
        self.localized_short_name = "Mitfahrgelegenheit"
        self.stopstore = stopstore
        self.ridestore = ridestore
        self.bbox = bbox

    def export(self, gtfszip_filename, gtfsfolder):
        """Build the feed in memory, write all GTFS files to gtfsfolder,
        then zip them into gtfszip_filename."""
        assert_folder_exists(gtfsfolder)
        self._prepare_gtfs_feed(self.ridestore, self.stopstore)
        self._write_csvfile(gtfsfolder, 'agency.txt', self.agencies)
        self._write_csvfile(gtfsfolder, 'feed_info.txt', self.feed_info)
        self._write_csvfile(gtfsfolder, 'routes.txt', self.routes)
        self._write_csvfile(gtfsfolder, 'trips.txt', self.trips)
        self._write_csvfile(gtfsfolder, 'calendar.txt', self.calendar)
        self._write_csvfile(gtfsfolder, 'calendar_dates.txt', self.calendar_dates)
        self._write_csvfile(gtfsfolder, 'stops.txt', self.stops.values())
        self._write_csvfile(gtfsfolder, 'stop_times.txt', self.stop_times)
        self._write_csvfile(gtfsfolder, 'shapes.txt', self.shapes)
        self._zip_files(gtfszip_filename, gtfsfolder)

    def _zip_files(self, gtfszip_filename, gtfsfolder):
        """Zip the previously written GTFS .txt files."""
        gtfsfiles = ['agency.txt', 'feed_info.txt', 'routes.txt', 'trips.txt',
                     'calendar.txt', 'calendar_dates.txt', 'stops.txt', 'stop_times.txt', 'shapes.txt']
        with ZipFile(gtfszip_filename, 'w') as gtfszip:
            for gtfsfile in gtfsfiles:
                gtfszip.write(gtfsfolder + '/' + gtfsfile, gtfsfile)

    def _prepare_gtfs_feed(self, ridestore, stopstore):
        """
        Prepares all gtfs objects in memory before they are written
        to their respective streams.

        For all wellknown stops a GTFS stop is created and
        afterwards all ride offers are transformed into their
        gtfs equivalents.
        """
        for stopSet in stopstore.stopsDataFrames:
            for stop in stopSet["stops"].itertuples():
                self._load_stored_stop(stop)
        # Copy to guard against concurrent modification of the trips dict
        # while iterating — TODO confirm this is the intent.
        cloned_trips = dict(ridestore.trips)
        for _, trip in cloned_trips.items():
            if self.bbox is None or trip.intersects(self.bbox):
                self._convert_trip(trip)

    def _convert_trip(self, trip):
        """Convert one trip into route, calendar(+date), trip, stop_times and shape."""
        self.routes_counter += 1
        self.routes.append(self._create_route(trip))
        self.calendar.append(self._create_calendar(trip))
        if not trip.runs_regularly:
            self.calendar_dates.append(self._create_calendar_date(trip))
        self.trips.append(self._create_trip(trip, self.routes_counter))
        self._append_stops_and_stop_times(trip)
        self._append_shapes(trip, self.routes_counter)

    def _trip_headsign(self, destination):
        """Derive a short headsign from a free-form destination string,
        stripping country names and postal codes (keeping a Swiss suffix)."""
        destination = destination.replace('(Deutschland)', '')
        destination = destination.replace(', Deutschland', '')
        appendix = ''
        if 'Schweiz' in destination or 'Switzerland' in destination:
            appendix = ', Schweiz'
            destination = destination.replace('(Schweiz)', '')
            destination = destination.replace(', Schweiz', '')
            destination = destination.replace('(Switzerland)', '')
        try:
            # Strip a leading "<place>," prefix and a 4-5 digit postal code.
            matches = re.match(r"(.*,)? ?(\d{4,5})? ?(.*)", destination)
            match = matches.group(3).strip() if matches is not None else destination.strip()
            # Drop a dangling closing parenthesis without an opening one.
            if match[-1] == ')' and not '(' in match:
                match = match[0:-1]
            return match + appendix
        except Exception as ex:
            logger.error("error for %s", destination)
            logger.exception(ex)
            return destination

    def _create_route(self, trip):
        return GtfsRoute(trip.agency, trip.trip_id, trip.route_long_name(), RIDESHARING_ROUTE_TYPE, trip.url, "", trip.route_color, trip.route_text_color)

    def _create_calendar(self, trip):
        # TODO currently, calendar is not provided by Fahrgemeinschaft.de interface.
        # We could apply some heuristics like requesting multiple days and extrapolate
        # if multiple trips are found, but better would be to have these provided by the
        # offical interface. Then validity periods should be provided as well (not
        # sure if these are available)
        # For fahrgemeinschaft.de, regurlar trips are recognizable via their url
        # which contains "regelmaessig". However, we don't know on which days of the week,
        # nor until when. As a first guess, if datetime is a mo-fr, we assume each workday,
        # if it's sa/su, only this...
        feed_start_date = datetime.today()
        stop_date = self._convert_stop_date(feed_start_date)
        # Validity window: today plus 31 days.
        return GtfsCalendar(trip.trip_id, stop_date, self._convert_stop_date(feed_start_date + timedelta(days=31)), *(trip.weekdays))

    def _create_calendar_date(self, trip):
        return GtfsCalendarDate(trip.trip_id, self._convert_stop_date(trip.start), CALENDAR_DATES_EXCEPTION_TYPE_ADDED)

    def _create_trip(self, trip, shape_id):
        return GtfsTrip(trip.trip_id, trip.trip_id, trip.trip_id, shape_id, trip.trip_headsign, NO_BIKES_ALLOWED)

    def _convert_stop(self, stop):
        """
        Converts a stop represented as pandas row to a gtfs stop.
        Expected attributes of stop: id, stop_name, x, y (in wgs84)
        """
        if stop.id:
            id = stop.id
        else:
            # No official id: assign a synthetic temporary one.
            self.stops_counter += 1
            id = "tmp-{}".format(self.stops_counter)
        stop_name = "k.A." if stop.stop_name is None else stop.stop_name
        return GtfsStop(id, stop.y, stop.x, stop_name)

    def _append_stops_and_stop_times(self, trip):
        # Assumptions:
        # arrival_time = departure_time
        # pickup_type, drop_off_type for origin: = coordinate/none
        # pickup_type, drop_off_type for destination: = none/coordinate
        # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners)
        for stop_time in trip.stop_times:
            # retrieve stop from stored_stops and mark it to be exported
            wkn_stop = self.stored_stops.get(stop_time.stop_id)
            if not wkn_stop:
                logger.warning("No stop found in stop_store for %s. Will skip stop_time %s of trip %s", stop_time.stop_id, stop_time.stop_sequence, trip.trip_id)
            else:
                self.stops[stop_time.stop_id] = wkn_stop
                # Append stop_time
                self.stop_times.append(stop_time)

    def _append_shapes(self, trip, shape_id):
        """Append one GtfsShape point per path coordinate (1-based sequence)."""
        counter = 0
        for point in trip.path.coordinates:
            counter += 1
            self.shapes.append(GtfsShape(shape_id, point[0], point[1], counter))

    def _stop_hash(self, stop):
        """Hash key for stops without stable ids: name + coordinates."""
        return "{}#{}#{}".format(stop.stop_name, stop.x, stop.y)

    def _should_always_export(self, stop):
        """
        Returns true, if the given stop shall be exported to GTFS,
        regardless, if it's part of a trip or not.

        This is necessary, as potential stops are required
        to be part of the GTFS to be referenced later on
        by dynamicly added trips.
        """
        if self.bbox:
            return (self.bbox[0] <= stop.stop_lon <= self.bbox[2] and
                    self.bbox[1] <= stop.stop_lat <= self.bbox[3])
        else:
            return is_carpooling_stop(stop.stop_id, stop.stop_name)

    def _load_stored_stop(self, stop):
        """Register a well-known stop; export it unconditionally when required."""
        gtfsstop = self._convert_stop(stop)
        # (removed: unused local stop_hash computation)
        self.stored_stops[gtfsstop.stop_id] = gtfsstop
        if self._should_always_export(gtfsstop):
            self.stops[gtfsstop.stop_id] = gtfsstop

    def _get_stop_by_hash(self, stop_hash):
        return self.stops.get(stop_hash, self.stored_stops.get(stop_hash))

    def _get_or_create_stop(self, stop):
        """Return the exported stop for this location, creating/promoting it if needed."""
        stop_hash = self._stop_hash(stop)
        gtfsstop = self.stops.get(stop_hash)
        if gtfsstop is None:
            gtfsstop = self.stored_stops.get(stop_hash, self._convert_stop(stop))
            self.stops[stop_hash] = gtfsstop
        return gtfsstop

    def _convert_stop_date(self, date_time):
        """Format a datetime as a GTFS date string (YYYYMMDD)."""
        return date_time.strftime("%Y%m%d")

    def _write_csvfile(self, gtfsfolder, filename, content):
        with open(gtfsfolder + "/" + filename, 'w', newline="\n", encoding="utf-8") as csvfile:
            self._write_csv(csvfile, content)

    def _write_csv(self, csvfile, content):
        """Write a single namedtuple (e.g. feed_info) or an iterable of
        namedtuples as a CSV file with a header row."""
        if hasattr(content, '_fields'):
            writer = csv.DictWriter(csvfile, content._fields)
            writer.writeheader()
            writer.writerow(content._asdict())
        else:
            # NOTE: truthiness check assumes content is a list/dict_view,
            # not a one-shot generator.
            if content:
                writer = csv.DictWriter(csvfile, next(iter(content))._fields)
                writer.writeheader()
                for record in content:
                    writer.writerow(record._asdict())

View file

@ -0,0 +1,220 @@
from fastapi import FastAPI, Body, status
from fastapi.responses import FileResponse
from .gtfs_export import GtfsExport, GtfsFeedInfo, GtfsAgency
from .gtfs import GtfsRtProducer
from amarillo.utils.container import container
# from amarillo.plugins.gtfs_export.router import router
from amarillo.plugins.enhancer.configuration import configure_enhancer_services
from glob import glob
import json
import schedule
import threading
import time
import logging
from .models.Carpool import Carpool, Region
from .router import _assert_region_exists
from amarillo.plugins.enhancer.services import stops
from amarillo.plugins.enhancer.services.trips import TripStore, Trip
from amarillo.plugins.enhancer.services.carpools import CarpoolService
from amarillo.services.agencies import AgencyService
from amarillo.services.regions import RegionService
logger = logging.getLogger(__name__)
def init():
    """Bootstrap shared services (agencies, regions, stops, trips, carpools)
    into the global container and restore persisted carpools from disk."""
    agency_service = AgencyService()
    container['agencies'] = agency_service
    logger.info("Loaded %d agencies", len(agency_service.agencies))

    region_service = RegionService()
    container['regions'] = region_service
    logger.info("Loaded %d regions", len(region_service.regions))

    logger.info("Load stops...")
    with open('data/stop_sources.json') as stop_sources_file:
        stop_sources = json.load(stop_sources_file)
    stop_store = stops.StopsStore(stop_sources)
    stop_store.load_stop_sources()

    # TODO: do we need container?
    container['stops_store'] = stop_store
    trip_store = TripStore(stop_store)
    container['trips_store'] = trip_store
    # TODO: do we need the carpool service at all?
    container['carpools'] = CarpoolService(trip_store)

    logger.info("Restore carpools...")
    for agency_id in agency_service.agencies:
        for carpool_file_name in glob(f'data/carpool/{agency_id}/*.json'):
            try:
                with open(carpool_file_name) as carpool_file:
                    carpool = Carpool(**(json.load(carpool_file)))
                # TODO: convert to trip and add to tripstore directly
                container['carpools'].put(carpool.agency, carpool.id, carpool)
            except Exception as e:
                logger.warning("Issue during restore of carpool %s: %s", carpool_file_name, repr(e))
def run_schedule():
    """Background loop driving the `schedule` library.

    Runs forever (intended for a daemon thread); exceptions raised by jobs
    are logged so a single failing job does not kill the scheduler.
    """
    while True:  # fixed: idiomatic boolean instead of `while 1`
        try:
            schedule.run_pending()
        except Exception as e:  # broad by design: keep the scheduler alive
            logger.exception(e)
        time.sleep(1)
def midnight():
    """Nightly job: reload stop sources, then regenerate the static GTFS feeds."""
    stops_store = container['stops_store']
    stops_store.load_stop_sources()
    # container['trips_store'].unflag_unrecent_updates()
    # container['carpools'].purge_outdated_offers()
    generate_gtfs()
    # TODO: generate for a specific region only
    # TODO: what happens when there are no trips?
def generate_gtfs():
    """Export one static GTFS zip per configured region into data/gtfs/."""
    logger.info("Generate GTFS")
    agencies = container['agencies'].agencies
    trips_store = container['trips_store']  # TODO: read carpools from disk and convert them to trips
    stops_store = container['stops_store']
    for region in container['regions'].regions.values():
        # TODO make feed producer infos configurable
        feed_info = GtfsFeedInfo('mfdz', 'MITFAHR|DE|ZENTRALE', 'http://www.mitfahrdezentrale.de', 'de', 1)
        exporter = GtfsExport(agencies, feed_info, trips_store, stops_store, region.bbox)
        exporter.export(f"data/gtfs/amarillo.{region.id}.gtfs.zip", "data/tmp/")
def generate_gtfs_rt():
    """Export a GTFS-RT feed (.pbf and .json) per configured region into data/gtfs/."""
    logger.info("Generate GTFS-RT")
    producer = GtfsRtProducer(container['trips_store'])
    for region in container['regions'].regions.values():
        # export_feed writes the files itself; its return value was previously
        # bound to an unused local (`rt`), now dropped.
        producer.export_feed(time.time(), f"data/gtfs/amarillo.{region.id}.gtfsrt", bbox=region.bbox)
def start_schedule():
    """Register the periodic jobs, run them once, then start a daemon
    thread that keeps them running."""
    schedule.every().day.at("00:00").do(midnight)
    schedule.every(60).seconds.do(generate_gtfs_rt)
    # Create all feeds once at startup
    schedule.run_all()
    worker = threading.Thread(target=run_schedule, daemon=True)
    worker.start()
def setup(app : FastAPI):
    # Plugin entry point called by the host application with its FastAPI app.
    # Currently a deliberate no-op; the commented lines sketch the intended
    # wiring (router registration and scheduler startup).
    # TODO: Create all feeds once at startup
    # configure_enhancer_services()
    # app.include_router(router)
    # start_schedule()
    pass
# `import logging` alone does not guarantee the logging.config submodule is
# loaded; import it explicitly before calling fileConfig below.
import logging.config

# Configure process-wide logging from the repo-local config file.
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("enhancer")

#TODO: clean up metadata
app = FastAPI(title="Amarillo GTFS Generator",
              description="This service allows carpool agencies to publish "
                          "their trip offers, so routing services may suggest "
                          "them as trip options. For carpool offers, only the "
                          "minimum required information (origin/destination, "
                          "optionally intermediate stops, departure time and a "
                          "deep link for booking/contacting the driver) needs to "
                          "be published, booking/contact exchange is to be "
                          "handled by the publishing agency.",
              version="0.0.1",
              # TODO 404
              terms_of_service="http://mfdz.de/carpool-hub-terms/",
              contact={
                  # "name": "unused",
                  # "url": "http://unused",
                  "email": "info@mfdz.de",
              },
              license_info={
                  "name": "AGPL-3.0 License",
                  "url": "https://www.gnu.org/licenses/agpl-3.0.de.html",
              },
              openapi_tags=[
                  {
                      "name": "carpool",
                      # "description": "Find out more about Amarillo - the carpooling intermediary",
                      "externalDocs": {
                          "description": "Find out more about Amarillo - the carpooling intermediary",
                          "url": "https://github.com/mfdz/amarillo",
                      },
                  }],
              servers=[
                  {
                      "description": "MobiData BW Amarillo service",
                      "url": "https://amarillo.mobidata-bw.de"
                  },
                  {
                      "description": "DABB bbnavi Amarillo service",
                      "url": "https://amarillo.bbnavi.de"
                  },
                  {
                      "description": "Demo server by MFDZ",
                      "url": "https://amarillo.mfdz.de"
                  },
                  {
                      "description": "Dev server for development",
                      "url": "https://amarillo-dev.mfdz.de"
                  },
                  {
                      "description": "Server for Mitanand project",
                      "url": "https://mitanand.mfdz.de"
                  },
                  {
                      "description": "Localhost for development",
                      "url": "http://localhost:8000"
                  }
              ],
              redoc_url=None
              )

# Populate the service container and restore persisted carpools at import time.
init()
@app.post("/",
          operation_id="enhancecarpool",
          summary="Add a new or update existing carpool",
          description="Carpool object to be enhanced",
          responses={
              status.HTTP_404_NOT_FOUND: {
                  "description": "Agency does not exist"},
          })
#TODO: add examples
async def post_carpool(carpool: Carpool = Body(...)):
    """Enhance the posted carpool and load it into the trip store."""
    logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
    trips_store: TripStore = container['trips_store']
    # NOTE(review): the result is bound but unused and the endpoint returns
    # None (HTTP 200 with null body) — confirm this is intended.
    trip = trips_store._load_as_trip(carpool)
#TODO: carpool deleted endpoint
#TODO: gtfs, gtfs-rt endpoints
@app.get("/region/{region_id}/gtfs",
         summary="Return GTFS Feed for this region",
         response_description="GTFS-Feed (zip-file)",
         response_class=FileResponse,
         responses={
             status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
         }
         )
async def get_file(region_id: str):
    """Return the GTFS zip for region_id, regenerating the feeds first."""
    _assert_region_exists(region_id)
    # NOTE(review): regenerates feeds for ALL regions on every request —
    # potentially expensive; consider serving the cached file instead.
    generate_gtfs()
    # verify_permission("gtfs", requesting_user)
    return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
#TODO: sync endpoint that calls midnight
@app.post("/sync",
          operation_id="sync")
#TODO: add examples
async def post_sync():
    """Trigger the nightly maintenance job (stop reload + GTFS regeneration) on demand."""
    logger.info("Sync")  # fixed: was an f-string with no placeholders
    midnight()

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


import amarillo.plugins.enhancer.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # trip_descriptor / stop_time_properties are injected into globals() by the
  # builder calls above — presumably extension handles; verify against protoc output.
  gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
  gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)

View file

@ -0,0 +1,407 @@
from datetime import time, date, datetime
from pydantic import ConfigDict, BaseModel, Field, HttpUrl, EmailStr
from typing import List, Union, Set, Optional, Tuple
from datetime import time
from pydantic import BaseModel, Field
from geojson_pydantic.geometries import LineString
from enum import Enum, IntEnum
# Numeric type accepted in bounding boxes (float or int).
NumType = Union[float, int]
# Upper bound on stops per trip — not enforced in this chunk; TODO confirm usage.
MAX_STOPS_PER_TRIP = 100
class Weekday(str, Enum):
    """Weekday names, serialized as their lowercase English string values."""
    monday = "monday"
    tuesday = "tuesday"
    wednesday = "wednesday"
    thursday = "thursday"
    friday = "friday"
    saturday = "saturday"
    sunday = "sunday"
class PickupDropoffType(str, Enum):
    """Whether passengers may board, alight, or both at a stop."""
    pickup_and_dropoff = "pickup_and_dropoff"
    only_pickup = "only_pickup"
    only_dropoff = "only_dropoff"
class YesNoEnum(IntEnum):
    """Two-valued flag where 1 means yes and 2 means no."""
    yes = 1
    no = 2
class LuggageSize(IntEnum):
    """Luggage size category: 1 = small, 2 = medium, 3 = large."""
    small = 1
    medium = 2
    large = 3
class StopTime(BaseModel):
    """A stop within a carpool trip: location, name and optional
    arrival/departure times (HH:MM[:SS], may exceed 24h after midnight)."""
    # NOTE(review): the last sentence of this description appears truncated
    # ("currently carpooling location.") — confirm intended wording upstream.
    id: Optional[str] = Field(
        None,
        description="Optional Stop ID. If given, it should conform to the "
                    "IFOPT specification. For official transit stops, "
                    "it should be their official IFOPT. In Germany, this is "
                    "the DHID which is available via the 'zentrales "
                    "Haltestellenverzeichnis (zHV)', published by DELFI e.V. "
                    "Note, that currently carpooling location.",
        pattern=r"^([a-zA-Z]{2,6}):\d+:\d+(:\d*(:\w+)?)?$|^osm:[nwr]\d+$",
        examples=["de:12073:900340137::2"])
    name: str = Field(
        description="Name of the location. Use a name that people will "
                    "understand in the local and tourist vernacular.",
        min_length=1,
        max_length=256,
        examples=["Angermünde, Breitscheidstr."])
    departureTime: Optional[str] = Field(
        None,
        description="Departure time from a specific stop for a specific "
                    "carpool trip. For times occurring after midnight on the "
                    "service day, the time is given as a value greater than "
                    "24:00:00 in HH:MM:SS local time for the day on which the "
                    "trip schedule begins. If there are not separate times for "
                    "arrival and departure at a stop, the same value for arrivalTime "
                    "and departureTime. Note, that arrivalTime/departureTime of "
                    "the stops are not mandatory, and might then be estimated by "
                    "this service.",
        pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
        examples=["17:00"]
    )
    arrivalTime: Optional[str] = Field(
        None,
        description="Arrival time at a specific stop for a specific trip on a "
                    "carpool route. If there are not separate times for arrival "
                    "and departure at a stop, enter the same value for arrivalTime "
                    "and departureTime. For times occurring after midnight on the "
                    "service day, the time as a value greater than 24:00:00 in "
                    "HH:MM:SS local time for the day on which the trip schedule "
                    "begins. Note, that arrivalTime/departureTime of the stops "
                    "are not mandatory, and might then be estimated by this "
                    "service.",
        pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
        examples=["18:00"])
    lat: float = Field(
        description="Latitude of the location. Should describe the location "
                    "where a passenger may mount/dismount the vehicle.",
        ge=-90,
        lt=90,
        examples=["53.0137311391"])
    lon: float = Field(
        description="Longitude of the location. Should describe the location "
                    "where a passenger may mount/dismount the vehicle.",
        ge=-180,
        lt=180,
        examples=["13.9934706687"])
    pickup_dropoff: Optional[PickupDropoffType] = Field(
        None, description="If passengers may be picked up, dropped off or both at this stop. "
                          "If not specified, this service may assign this according to some custom rules. "
                          "E.g. Amarillo may allow pickup only for the first third of the distance travelled, "
                          "and dropoff only for the last third." ,
        examples=["only_pickup"]
    )
    model_config = ConfigDict(json_schema_extra={
        "example": "{'id': 'de:12073:900340137::2', 'name': "
                   "'Angermünde, Breitscheidstr.', 'lat': 53.0137311391, "
                   "'lon': 13.9934706687}"
    })
class Region(BaseModel):
    """A named region (short alphanumeric id) with a bounding box,
    used to scope generated GTFS feeds."""
    id: str = Field(
        description="ID of the region.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["bb"])
    bbox: Tuple[NumType, NumType, NumType, NumType] = Field(
        description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]",
        examples=[[10.5,49.2,11.3,51.3]])
class RidesharingInfo(BaseModel):
    """Ridesharing-specific attributes of a trip offer (seats, car,
    luggage, preferences, payment)."""
    number_free_seats: int = Field(
        description="Number of free seats",
        ge=0,
        examples=[3])
    same_gender: Optional[YesNoEnum] = Field(
        None,
        description="Trip only for same gender:"
                    "1: Yes"
                    "2: No",
        examples=[1])
    luggage_size: Optional[LuggageSize] = Field(
        None,
        description="Size of the luggage:"
                    "1: small"
                    "2: medium"
                    "3: large",
        examples=[3])
    animal_car: Optional[YesNoEnum] = Field(
        None,
        description="Animals in Car allowed:"
                    "1: Yes"
                    "2: No",
        examples=[2])
    car_model: Optional[str] = Field(
        None,
        description="Car model",
        min_length=1,
        max_length=48,
        examples=["Golf"])
    car_brand: Optional[str] = Field(
        None,
        description="Car brand",
        min_length=1,
        max_length=48,
        examples=["VW"])
    creation_date: datetime = Field(
        description="Date when trip was created",
        examples=["2022-02-13T20:20:39+00:00"])
    smoking: Optional[YesNoEnum] = Field(
        None,
        description="Smoking allowed:"
                    "1: Yes"
                    "2: No",
        examples=[2])
    payment_method: Optional[str] = Field(
        None,
        description="Method of payment",
        min_length=1,
        max_length=48)
class Driver(BaseModel):
    """Information about the driver offering a trip (id, picture, rating)."""
    driver_id: Optional[str] = Field(
        None,
        description="Identifies the driver.",
        min_length=1,
        max_length=256,
        pattern='^[a-zA-Z0-9_-]+$',
        examples=["789"])
    profile_picture: Optional[HttpUrl] = Field(
        None,
        description="URL that contains the profile picture",
        examples=["https://mfdz.de/driver/789/picture"])
    rating: Optional[int] = Field(
        None,
        description="Rating of the driver from 1 to 5."
                    "0 no rating yet",
        ge=0,
        le=5,
        examples=[5])
class Agency(BaseModel):
    # Carpool agency master data. Field names and semantics mirror the
    # GTFS agency.txt columns (name, url, timezone, lang, email).
    id: str = Field(
        description="ID of the agency.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])
    name: str = Field(
        description="Name",
        min_length=1,
        max_length=48,
        # NOTE(review): inside the character class, " -\." is a range from
        # space (0x20) to '.' (0x2E), so !"#$%&'()*+, are also accepted.
        # If only space, '-', '.' and '|' were intended, this should be
        # r'^[\w .\-|]+$'. Left unchanged to avoid tightening validation.
        pattern=r'^[\w -\.\|]+$',
        examples=["MITFAHR|DE|ZENTRALE"])
    url: HttpUrl = Field(
        description="URL of the carpool agency.",
        examples=["https://mfdz.de/"])
    timezone: str = Field(
        description="Timezone where the carpool agency is located.",
        min_length=1,
        max_length=48,
        pattern=r'^[\w/]+$',
        examples=["Europe/Berlin"])
    lang: str = Field(
        description="Primary language used by this carpool agency.",
        min_length=1,
        max_length=2,
        pattern=r'^[a-zA-Z_]+$',
        examples=["de"])
    email: EmailStr = Field(
        description="""Email address actively monitored by the agency's
        customer service department. This email address should be a direct
        contact point where carpool riders can reach a customer service
        representative at the agency.""",
        examples=["info@mfdz.de"])
    terms_url: Optional[HttpUrl] = Field(
        None, description="""A fully qualified URL pointing to the terms of service
        (also often called "terms of use" or "terms and conditions")
        for the service.""",
        examples=["https://mfdz.de/nutzungsbedingungen"])
    privacy_url: Optional[HttpUrl] = Field(
        None, description="""A fully qualified URL pointing to the privacy policy for the service.""",
        examples=["https://mfdz.de/datenschutz"])
    model_config = ConfigDict(json_schema_extra={
        "title": "Agency",
        "description": "Carpool agency.",
        # Commented-out '#"""' markers around the dict removed: the example
        # is (and was) a plain dict literal.
        "example":
            {
                "id": "mfdz",
                "name": "MITFAHR|DE|ZENTRALE",
                "url": "http://mfdz.de",
                "timezone": "Europe/Berlin",
                "lang": "de",
                "email": "info@mfdz.de",
                "terms_url": "https://mfdz.de/nutzungsbedingungen",
                "privacy_url": "https://mfdz.de/datenschutz",
            }
    })
class Carpool(BaseModel):
    # A single carpool offer: publishing agency, the stops it serves,
    # departure time/date (one-time or recurring), plus optional driver,
    # presentation and ridesharing metadata.
    id: str = Field(
        description="ID of the carpool. Should be supplied and managed by the "
                    "carpooling platform which originally published this "
                    "offer.",
        min_length=1,
        max_length=256,
        pattern='^[a-zA-Z0-9_-]+$',
        examples=["103361"])
    agency: str = Field(
        description="Short one string name of the agency, used as a namespace "
                    "for ids.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])
    driver: Optional[Driver] = Field(
        None,
        description="Driver data",
        examples=["""
        {
            "driver_id": "123",
            "profile_picture": "https://mfdz.de/driver/789/picture",
            "rating": 5
        }
        """])
    deeplink: HttpUrl = Field(
        description="Link to an information page providing detail information "
                    "for this offer, and, especially, an option to book the "
                    "trip/contact the driver.",
        examples=["https://mfdz.de/trip/103361"])
    stops: List[StopTime] = Field(
        ...,
        min_length=2,
        max_length=MAX_STOPS_PER_TRIP,
        description="Stops which this carpool passes by and offers to pick "
                    "up/drop off passengers. This list must at minimum "
                    "include two stops, the origin and destination of this "
                    "carpool trip. Note that for privacy reasons, the stops "
                    "usually should be official locations, like meeting "
                    "points, carpool parkings, ridesharing benches or "
                    "similar.",
        examples=["""[
        {
            "id": "03",
            "name": "drei",
            "lat": 45,
            "lon": 9
        },
        {
            "id": "03b",
            "name": "drei b",
            "lat": 45,
            "lon": 9
        }
        ]"""])
    # TODO can be removed, as first stop has departureTime as well
    departureTime: time = Field(
        # Fixed: the literals previously joined as "...flexible.For recurring";
        # a separating space was added. NOTE(review): the trailing sentence
        # about weekdays presumably belongs to departureDate — confirm.
        description="Time when the carpool leaves at the first stop. Note, "
                    "that this API currently does not support flexible time "
                    "windows for departure, though drivers might be flexible. "
                    "For recurring trips, the weekdays this trip will run. ",
        examples=["17:00"])
    # TODO think about using googlecal Format
    departureDate: Union[date, Set[Weekday]] = Field(
        description="Date when the trip will start, in case it is a one-time "
                    "trip. For recurring trips, specify weekdays. "
                    "Note, that when for different weekdays different "
                    "departureTimes apply, multiple carpool offers should be "
                    "published.",
        examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
                  '"sunday"]'])
    route_color: Optional[str] = Field(
        None,
        pattern='^([0-9A-Fa-f]{6})$',
        description="Route color designation that matches public facing material. "
                    "The color difference between route_color and route_text_color "
                    "should provide sufficient contrast when viewed on a black and "
                    "white screen.",
        examples=["0039A6"])
    route_text_color: Optional[str] = Field(
        None,
        pattern='^([0-9A-Fa-f]{6})$',
        description="Legible color to use for text drawn against a background of "
                    "route_color. The color difference between route_color and "
                    "route_text_color should provide sufficient contrast when "
                    "viewed on a black and white screen.",
        examples=["D4D2D2"])
    path: Optional[LineString] = Field(
        None, description="Optional route geometry as json LineString.")
    lastUpdated: Optional[datetime] = Field(
        None,
        description="LastUpdated should reflect the last time, the user "
                    "providing this offer, made an update or confirmed, "
                    "the offer is still valid. Note that this service might "
                    "purge outdated offers (e.g. older than 180 days). If not "
                    "passed, the service may assume 'now'",
        examples=["2022-02-13T20:20:39+00:00"])
    additional_ridesharing_info: Optional[RidesharingInfo] = Field(
        None,
        description="Extension of GRFS to the GTFS standard",
        examples=["""
        {
            "number_free_seats": 2,
            "creation_date": "2022-02-13T20:20:39+00:00",
            "same_gender": 2,
            "smoking": 1,
            "luggage_size": 3
        }
        """])
    model_config = ConfigDict(json_schema_extra={
        "title": "Carpool",
        # description ...
        "example":
            """
            {
                "id": "1234",
                "agency": "mfdz",
                "deeplink": "http://mfdz.de",
                "stops": [
                    {
                        "id": "de:12073:900340137::2", "name": "ABC",
                        "lat": 45, "lon": 9
                    },
                    {
                        "id": "de:12073:900340137::3", "name": "XYZ",
                        "lat": 45, "lon": 9
                    }
                ],
                "departureTime": "12:34",
                "departureDate": "2022-03-30",
                "lastUpdated": "2022-03-30T12:34:00+00:00"
            }
            """
    })

View file

@ -0,0 +1,30 @@
# TODO: move to enhancer
from collections import namedtuple
from datetime import timedelta
# Plain row records mirroring the GTFS CSV files produced by the exporter
# (feed_info, agency, routes, stops, stop_times, trips, calendar,
# calendar_dates, shapes). Field names match the GTFS column names, so a
# record can be written out as one CSV row.
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name route_color route_text_color')
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed')
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
# TODO Move to utils
class GtfsTimeDelta(timedelta):
    """timedelta that renders as a GTFS ``HH:MM:SS`` time string.

    GTFS allows hour values of 24 and above for trips running past
    midnight, so the hour field is deliberately not wrapped modulo 24.
    """

    def __str__(self):
        # divmod replaces the separate //, % steps; the previous version
        # also shadowed the builtin ``str`` with a local variable name.
        minutes, seconds = divmod(int(self.total_seconds()), 60)
        hours, minutes = divmod(minutes, 60)
        # NOTE(review): a negative delta would format incorrectly; callers
        # are assumed to pass non-negative durations — TODO confirm.
        return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)

    def __add__(self, other):
        """Add a timedelta, preserving the GtfsTimeDelta subtype.

        (Plain ``timedelta.__add__`` would return a ``timedelta``,
        losing the GTFS string formatting.)
        """
        if isinstance(other, timedelta):
            return self.__class__(self.days + other.days,
                                  self.seconds + other.seconds,
                                  self.microseconds + other.microseconds)
        return NotImplemented

View file

@ -0,0 +1,68 @@
import logging
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Region
from amarillo.services.regions import RegionService
# from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.models.User import User
from amarillo.utils.container import container
from fastapi.responses import FileResponse
logger = logging.getLogger(__name__)  # standard module-scoped logger
router = APIRouter()  # holds this module's routes; included by the host app
# @router.post("/export")
# async def trigger_export(requesting_user: User = Depends(get_current_user)):
# verify_permission("generate-gtfs", requesting_user)
# #import is here to avoid circular import
# from amarillo.plugins.gtfs_export.gtfs_generator import generate_gtfs
# generate_gtfs()
#TODO: move to amarillo/utils?
def _assert_region_exists(region_id: str) -> Region:
    """Return the region for ``region_id``, raising HTTP 404 if unknown.

    Looks the region up in the RegionService registered in the app
    container; logs the failure before raising so operators see it even
    when the HTTP response is swallowed by the client.
    """
    regions: RegionService = container['regions']
    region = regions.get_region(region_id)
    # Direct ``is None`` test replaces the redundant intermediate
    # ``region_exists`` boolean of the original implementation.
    if region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return region
# @router.get("/region/{region_id}/gtfs",
# summary="Return GTFS Feed for this region",
# response_description="GTFS-Feed (zip-file)",
# response_class=FileResponse,
# responses={
# status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
# }
# )
# async def get_file(region_id: str, requesting_user: User = Depends(get_current_user)):
# verify_permission("gtfs", requesting_user)
# _assert_region_exists(region_id)
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
# @router.get("/region/{region_id}/gtfs-rt",
# summary="Return GTFS-RT Feed for this region",
# response_description="GTFS-RT-Feed",
# response_class=FileResponse,
# responses={
# status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
# status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
# }
# )
# async def get_file(region_id: str, format: str = 'protobuf', requesting_user: User = Depends(get_current_user)):
# verify_permission("gtfs", requesting_user)
# _assert_region_exists(region_id)
# if format == 'json':
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
# elif format == 'protobuf':
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
# else:
# message = "Specified format is not supported, i.e. neither protobuf nor json."
# raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

View file

@ -0,0 +1,142 @@
from amarillo.tests.sampledata import carpool_1234, data1, carpool_repeating_json, stop_issue
from amarillo.plugins.enhancer.services.gtfs_export import GtfsExport
from amarillo.plugins.enhancer.services.gtfs import GtfsRtProducer
from amarillo.plugins.enhancer.services.stops import StopsStore
from amarillo.plugins.enhancer.services.trips import TripStore
from amarillo.models.Carpool import Carpool
from datetime import datetime
import time
import pytest
def test_gtfs_generation():
    # Smoke test: storing one carpool and exporting must produce a GTFS
    # zip in the target directory without raising.
    carpool = Carpool(**data1)
    stop_store = StopsStore()
    trip_store = TripStore(stop_store)
    trip_store.put_carpool(carpool)
    GtfsExport(None, None, trip_store, stop_store).export(
        'target/tests/test_gtfs_generation/test.gtfs.zip',
        "target/tests/test_gtfs_generation")
def test_correct_stops():
    # Regression test: the problematic carpool must still result in
    # exactly one stored trip after real stop sources are loaded.
    carpool = Carpool(**stop_issue)
    store = StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 250}])
    store.load_stop_sources()
    trip_store = TripStore(store)
    trip_store.put_carpool(carpool)
    assert 1 == len(trip_store.trips)
class TestTripConverter:
    # Tests for GtfsRtProducer's conversion of stored trips into GTFS-RT
    # TripUpdate JSON: CANCELED ("delete") updates and ADDED updates, for
    # both one-time and recurring carpools.
    def setup_method(self, method):
        # Fresh stores per test; stops are matched against the bbnavi
        # ridesharing points feed within a 50 m vicinity.
        self.stops_store = StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}])
        self.trips_store = TripStore(self.stops_store)
    def test_as_one_time_trip_as_delete_update(self):
        # A one-time trip yields exactly one CANCELED TripUpdate for its
        # single service day.
        cp = Carpool(**data1)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))
        converter = GtfsRtProducer(self.trips_store)
        json = converter._as_delete_updates(trip, datetime(2022,4,11))
        assert json == [{
            'trip': {
                'tripId': 'mfdz:Eins',
                'startTime': '23:59:00',
                'startDate': '20220530',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Eins'
            }
        }]
    def test_as_one_time_trip_as_added_update(self):
        # An ADDED update carries the trip descriptor extension plus one
        # stopTimeUpdate per stop, with epoch-second arrival/departure times.
        cp = Carpool(**data1)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))
        converter = GtfsRtProducer(self.trips_store)
        json = converter._as_added_updates(trip, datetime(2022,4,11))
        assert json == [{
            'trip': {
                'tripId': 'mfdz:Eins',
                'startTime': '23:59:00',
                'startDate': '20220530',
                'scheduleRelationship': 'ADDED',
                'routeId': 'mfdz:Eins',
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl' : 'https://mfdz.de/trip/123',
                    'agencyId' : 'mfdz',
                    'route_long_name' : 'abc nach xyz',
                    'route_type': 1551
                }
            },
            'stopTimeUpdate': [{
                'stopSequence': 1,
                'arrival': {
                    # time.mktime interprets the naive datetime in local time
                    'time': time.mktime(datetime(2022,5,30,23,59,0).timetuple()),
                    'uncertainty': 600
                },
                'departure': {
                    'time': time.mktime(datetime(2022,5,30,23,59,0).timetuple()),
                    'uncertainty': 600
                },
                'stopId': 'mfdz:12073:001',
                'scheduleRelationship': 'SCHEDULED',
                'stop_time_properties': {
                    '[transit_realtime.stop_time_properties]': {
                        'dropoffType': 'NONE',
                        'pickupType': 'COORDINATE_WITH_DRIVER'
                    }
                }
            },
            {
                'stopSequence': 2,
                'arrival': {
                    'time': time.mktime(datetime(2022,5,31,0,16,45,0).timetuple()),
                    'uncertainty': 600
                },
                'departure': {
                    'time': time.mktime(datetime(2022,5,31,0,16,45,0).timetuple()),
                    'uncertainty': 600
                },
                'stopId': 'de:12073:900340137::3',
                'scheduleRelationship': 'SCHEDULED',
                'stop_time_properties': {
                    '[transit_realtime.stop_time_properties]': {
                        'dropoffType': 'COORDINATE_WITH_DRIVER',
                        'pickupType': 'NONE'
                    }
                }
            }]
        }]
    def test_as_periodic_trip_as_delete_update(self):
        # A recurring trip yields one CANCELED update per matching service
        # day in the queried window (here two consecutive Mondays).
        cp = Carpool(**carpool_repeating_json)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))
        converter = GtfsRtProducer(self.trips_store)
        json = converter._as_delete_updates(trip, datetime(2022,4,11))
        assert json == [{
            'trip': {
                'tripId': 'mfdz:Zwei',
                'startTime': '15:00:00',
                'startDate': '20220411',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Zwei'
            }
        },
        {
            'trip': {
                'tripId': 'mfdz:Zwei',
                'startTime': '15:00:00',
                'startDate': '20220418',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Zwei'
            }
        }
        ]

7
pyproject.toml Normal file
View file

@ -0,0 +1,7 @@
# Packaging metadata for the amarillo-gtfs-generator plugin.
[project]
name = "amarillo-gtfs-generator"
version = "0.0.1"
# NOTE(review): the code imports fastapi, pydantic and amarillo.* but the
# dependency list is empty — presumably the host Amarillo app provides
# them at runtime; confirm before publishing standalone.
dependencies = []
[tool.setuptools.packages]
find = {}