commit
f863012f72
169
.gitignore
vendored
Normal file
169
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
data/
|
||||
secrets
|
||||
config
|
||||
static/**
|
||||
templates/**
|
||||
conf/**
|
||||
data
|
||||
36
.vscode/launch.json
vendored
Normal file
36
.vscode/launch.json
vendored
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
|
||||
// {
|
||||
// "name": "Debug Tests",
|
||||
// "type": "debugpy",
|
||||
// "request": "launch",
|
||||
// "purpose": ["debug-test"],
|
||||
// "module": "pytest",
|
||||
// "console": "integratedTerminal",
|
||||
// "justMyCode": true,
|
||||
// "env": {
|
||||
// "_PYTEST_RAISE": "1"
|
||||
// },
|
||||
// },
|
||||
{
|
||||
"name": "Python: FastAPI",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"args": [
|
||||
"amarillo_gtfs_generator.gtfs_generator:app",
|
||||
"--workers=1",
|
||||
"--port=8002"
|
||||
],
|
||||
// "preLaunchTask": "enhance",
|
||||
"jinja": true,
|
||||
"justMyCode": false,
|
||||
"env": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
24
Dockerfile
Normal file
24
Dockerfile
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
FROM tiangolo/uvicorn-gunicorn:python3.10-slim
|
||||
|
||||
LABEL maintainer="info@mfdz.de"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
COPY requirements.txt /app/requirements.txt
|
||||
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
|
||||
|
||||
COPY ./amarillo_gtfs_generator /app/amarillo_gtfs_generator
|
||||
COPY ./logging.conf /app
|
||||
|
||||
ENV MODULE_NAME=amarillo_gtfs_generator.gtfs_generator
|
||||
ENV MAX_WORKERS=1
|
||||
|
||||
RUN useradd amarillo
|
||||
USER amarillo
|
||||
|
||||
# This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead
|
||||
# CMD ["uvicorn", "amarillo.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
||||
# docker run -it --rm --name amarillo-gtfs-generator -p 8002:80 -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo-gtfs-generator
|
||||
79
Jenkinsfile
vendored
Normal file
79
Jenkinsfile
vendored
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
pipeline {
|
||||
agent { label 'builtin' }
|
||||
environment {
|
||||
GITEA_CREDS = credentials('AMARILLO-JENKINS-GITEA-USER')
|
||||
PYPI_CREDS = credentials('AMARILLO-JENKINS-PYPI-USER')
|
||||
TWINE_REPO_URL = "https://git.gerhardt.io/api/packages/amarillo/pypi"
|
||||
DOCKER_REGISTRY_URL = 'https://git.gerhardt.io'
|
||||
OWNER = 'amarillo'
|
||||
IMAGE_NAME = 'amarillo-gtfs-generator'
|
||||
DISTRIBUTION = '0.1'
|
||||
TAG = "${DISTRIBUTION}.${BUILD_NUMBER}"
|
||||
}
|
||||
stages {
|
||||
stage('Create virtual environment') {
|
||||
steps {
|
||||
echo 'Creating virtual environment'
|
||||
sh '''python3 -m venv .venv
|
||||
. .venv/bin/activate'''
|
||||
}
|
||||
}
|
||||
stage('Installing requirements') {
|
||||
steps {
|
||||
echo 'Installing packages'
|
||||
sh 'python3 -m pip install -r requirements.txt'
|
||||
sh 'python3 -m pip install --upgrade build'
|
||||
sh 'python3 -m pip install --upgrade twine'
|
||||
}
|
||||
}
|
||||
stage('Build') {
|
||||
steps {
|
||||
echo 'Cleaning up dist directory'
|
||||
dir("dist") {
|
||||
deleteDir()
|
||||
}
|
||||
echo 'Building package'
|
||||
sh 'python3 -m build'
|
||||
}
|
||||
}
|
||||
stage('Publish package to GI') {
|
||||
steps {
|
||||
sh 'python3 -m twine upload --skip-existing --verbose --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*'
|
||||
}
|
||||
}
|
||||
stage('Publish package to PyPI') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
sh 'python3 -m twine upload --verbose --username $PYPI_CREDS_USR --password $PYPI_CREDS_PSW ./dist/*'
|
||||
}
|
||||
}
|
||||
stage('Build docker image') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
echo 'Building image'
|
||||
script {
|
||||
docker.build("${OWNER}/${IMAGE_NAME}:${TAG}")
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Push image to container registry') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
echo 'Pushing image to registry'
|
||||
script {
|
||||
docker.withRegistry(DOCKER_REGISTRY_URL, 'AMARILLO-JENKINS-GITEA-USER'){
|
||||
def image = docker.image("${OWNER}/${IMAGE_NAME}:${TAG}")
|
||||
image.push()
|
||||
image.push('latest')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
88
README.md
88
README.md
|
|
@ -1,2 +1,88 @@
|
|||
# amarillo-gtfs-generator
|
||||
Generate GTFS from carpools as standalone (Docker) service
|
||||
Generate GTFS from carpools as standalone (Docker) service.
|
||||
|
||||
This service complements the Amarillo application, creating GTFS and GTFS-RT data from the enhanced Amarillo carpool files.
|
||||
It is a non-public backend service called from the Amarillo FastAPI application.
|
||||
You can run it as part of docker compose, or separately using the instructions below.
|
||||
|
||||
# Overview
|
||||
|
||||

|
||||
|
||||
# Usage
|
||||
|
||||
## 1. Configuration
|
||||
|
||||
### Create `data/stop_sources.json`
|
||||
|
||||
Example contents:
|
||||
```json
|
||||
[
|
||||
{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50},
|
||||
{"url": "https://data.mfdz.de/mfdz/stops/stops_zhv.csv", "vicinity": 50},
|
||||
{"url": "https://data.mfdz.de/mfdz/stops/parkings_osm.csv", "vicinity": 500}
|
||||
]
|
||||
```
|
||||
|
||||
### Add region files `data/region`
|
||||
|
||||
File name should be `{region_id}.json`.
|
||||
|
||||
Example (`by.json`):
|
||||
```json
|
||||
{"id": "by", "bbox": [ 8.97, 47.28, 13.86, 50.56]}
|
||||
```
|
||||
For each region a separate GTFS zip file will be created in `/data/gtfs`, only containing the trips that intersect the region's bounding box.
|
||||
|
||||
### Add agency files `data/agency`
|
||||
|
||||
File name should be `{agency_id}.json`.
|
||||
|
||||
Example (`mfdz.json`):
|
||||
```json
|
||||
{
|
||||
"id": "mfdz",
|
||||
"name": "MITFAHR|DE|ZENTRALE",
|
||||
"url": "http://mfdz.de",
|
||||
"timezone": "Europe/Berlin",
|
||||
"lang": "de",
|
||||
"email": "info@mfdz.de"
|
||||
}
|
||||
```
|
||||
The generator will use this data to populate agency.txt in the GTFS output.
|
||||
|
||||
### Uvicorn configuration
|
||||
|
||||
`amarillo-gtfs-generator` uses `uvicorn` to run. Uvicorn can be configured as normal by passing in arguments such as `--port 8002` to change the port number.
|
||||
|
||||
## 2. Install the gtfs-exporter plugin for Amarillo
|
||||
|
||||
This is a separate service and not used by Amarillo by default. You should use the [amarillo-gtfs-exporter plugin](https://github.com/mfdz/amarillo-gtfs-exporter) which creates endpoints for `/region/{region_id}/gtfs` and `/region/{region_id}/gtfs-rt` on your Amarillo instance. These will serve the GTFS zip files from `data/gtfs`, or if they do not exist yet, they will call the configured generator and cache the results.
|
||||
|
||||
## 3. Add carpools to Amarillo
|
||||
|
||||
Use Amarillo's `/carpool` endpoint to create new carpools. The generator listens to file system events in the `/data/enhanced` folder to recognize newly added or deleted carpools. It will also discover existing carpools on startup. GTFS generation happens automatically on startup, at midnight on a schedule, and by sending a GET request to a `/region/{region_id}/gtfs` or `/region/{region_id}/gtfs-rt` endpoint.
|
||||
|
||||
Amarillo will use its configured enhancer to create enhanced carpool files. They will get picked up by the generator and they will be included in the next batch of generated GTFS data. Changes to carpools will be reflected immediately in the GTFS-RT output.
|
||||
|
||||
<!-- Q: how immediately? -->
|
||||
|
||||
# Run with uvicorn
|
||||
|
||||
- Python 3.10 with pip
|
||||
- python3-venv
|
||||
|
||||
Create a virtual environment `python3 -m venv venv`.
|
||||
|
||||
Activate the environment and install the dependencies `pip install -r requirements.txt`.
|
||||
|
||||
Run `uvicorn amarillo_gtfs_generator.gtfs_generator:app`.
|
||||
|
||||
In development, you can use `--reload`.
|
||||
|
||||
# Run with docker
|
||||
You can download a container image from the [MFDZ package registry](https://github.com/orgs/mfdz/packages?repo_name=amarillo-gtfs-generator).
|
||||
|
||||
Example command:
|
||||
```bash
|
||||
docker run -it --rm --name amarillo-gtfs-generator -p 8002:80 -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo-gtfs-generator
```
|
||||
0
amarillo_gtfs_generator/__init__.py
Normal file
0
amarillo_gtfs_generator/__init__.py
Normal file
137
amarillo_gtfs_generator/gtfs.py
Normal file
137
amarillo_gtfs_generator/gtfs.py
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
from .gtfsrt import gtfs_realtime_pb2 as gtfs_realtime_pb2
|
||||
from .gtfsrt import realtime_extension_pb2 as mfdzrte
|
||||
from .gtfs_constants import *
|
||||
from google.protobuf.json_format import MessageToDict
|
||||
from google.protobuf.json_format import ParseDict
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
|
||||
class GtfsRtProducer():
    """Builds GTFS-RT TripUpdate feeds from the trips held in a trip store.

    Recently added trips are published as ADDED TripUpdates (carrying MFDZ
    realtime-extension fields such as route url/name/type), recently deleted
    trips as CANCELED TripUpdates.
    """

    def __init__(self, trip_store):
        # trip_store must provide recently_added_trips() and
        # recently_deleted_trips() (see _get_added/_get_deleted below).
        self.trip_store = trip_store

    def generate_feed(self, time, format='protobuf', bbox=None):
        """Generate a GTFS-RT feed of all recently added/deleted trips.

        time   -- feed timestamp in seconds since epoch (note: shadows the
                  `time` module inside this method; the module is not used here)
        format -- 'message' returns the protobuf FeedMessage object,
                  'json' returns a plain dict, anything else returns the
                  serialized protobuf bytes (default)
        bbox   -- optional bounding box; only trips intersecting it are included
        """
        # See https://developers.google.com/transit/gtfs-realtime/reference
        # https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
        gtfsrt_dict = {
            'header': {
                'gtfsRealtimeVersion': '1.0',
                'timestamp': int(time)
            },
            'entity': self._get_trip_updates(bbox)
        }
        feed = gtfs_realtime_pb2.FeedMessage()
        ParseDict(gtfsrt_dict, feed)

        # Hoisted: the original called format.lower() once per comparison.
        requested_format = format.lower()
        if requested_format == "message":
            return feed
        elif requested_format == "json":
            return MessageToDict(feed)
        else:
            return feed.SerializeToString()

    def export_feed(self, timestamp, file_path, bbox=None):
        """
        Exports gtfs-rt feed as .json and .pbf file to file_path
        """
        feed = self.generate_feed(timestamp, "message", bbox)
        with open(f"{file_path}.pbf", "wb") as f:
            f.write(feed.SerializeToString())
        with open(f"{file_path}.json", "w") as f:
            json.dump(MessageToDict(feed), f)

    def _get_trip_updates(self, bbox=None):
        """Collect added and deleted trips as FeedEntity dicts with
        sequentially numbered ids."""
        trips = []
        trips.extend(self._get_added(bbox))
        trips.extend(self._get_deleted(bbox))
        trip_updates = []
        for num, trip in enumerate(trips):
            trip_updates.append({
                'id': f'carpool-update-{num}',
                'tripUpdate': trip
            }
            )
        return trip_updates

    def _get_deleted(self, bbox=None):
        # CANCELED updates for trips removed from the store.
        return self._get_updates(
            self.trip_store.recently_deleted_trips(),
            self._as_delete_updates,
            bbox)

    def _get_added(self, bbox=None):
        # ADDED updates for trips newly put into the store.
        return self._get_updates(
            self.trip_store.recently_added_trips(),
            self._as_added_updates,
            bbox)

    def _get_updates(self, trips, update_func, bbox=None):
        """Apply update_func to every trip (optionally filtered by bbox)
        and flatten the per-trip update lists into one list."""
        updates = []
        today = datetime.today()
        for t in trips:
            # `is None` instead of `== None` (original used the equality form)
            if bbox is None or t.intersects(bbox):
                updates.extend(update_func(t, today))
        return updates

    def _as_delete_updates(self, trip, fromdate):
        """One CANCELED TripUpdate dict per upcoming service date of trip."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'CANCELED',
                'routeId': trip.trip_id
            }
        } for trip_date in trip.next_trip_dates(fromdate)]

    def _to_seconds(self, fromdate, stop_time):
        """Convert a GTFS HH:MM:SS stop_time on service day fromdate
        (YYYYMMDD) to local epoch seconds. HH may exceed 23 for
        after-midnight times; timedelta handles the overflow."""
        startdate = datetime.strptime(fromdate, '%Y%m%d')
        m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
        delta = timedelta(
            hours=int(m.group(1)),
            minutes=int(m.group(2)),
            seconds=int(m.group(3)))
        return time.mktime((startdate + delta).timetuple())

    def _to_stop_times(self, trip, fromdate):
        """StopTimeUpdate dicts for all stop_times of trip on fromdate.
        Arrival equals departure; pickup/dropoff map the GTFS 'coordinate
        with driver' stop type, everything else becomes NONE."""
        return [{
            'stopSequence': stoptime.stop_sequence,
            'arrival': {
                'time': self._to_seconds(fromdate, stoptime.arrival_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'departure': {
                'time': self._to_seconds(fromdate, stoptime.departure_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'stopId': stoptime.stop_id,
            'scheduleRelationship': 'SCHEDULED',
            'stop_time_properties': {
                '[transit_realtime.stop_time_properties]': {
                    'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
                    'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
                }
            }
        }
            for stoptime in trip.stop_times]

    def _as_added_updates(self, trip, fromdate):
        """One ADDED TripUpdate dict (incl. stop time updates and MFDZ
        trip-descriptor extension) per upcoming service date of trip."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'ADDED',
                'routeId': trip.trip_id,
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl': trip.url,
                    'agencyId': trip.agency,
                    'route_long_name': trip.route_long_name(),
                    'route_type': RIDESHARING_ROUTE_TYPE
                }
            },
            'stopTimeUpdate': self._to_stop_times(trip, trip_date)
        } for trip_date in trip.next_trip_dates(fromdate)]
|
||||
14
amarillo_gtfs_generator/gtfs_constants.py
Normal file
14
amarillo_gtfs_generator/gtfs_constants.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# Constants

# GTFS trips.txt bikes_allowed: 2 = no bicycles allowed on this trip
NO_BIKES_ALLOWED = 2
# Extended GTFS route type used for ridesharing/carpool routes
RIDESHARING_ROUTE_TYPE = 1551
# GTFS calendar_dates.txt exception_type: 1 = service added, 2 = service removed
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
# GTFS stop_times.txt pickup_type / drop_off_type values:
# 0 regularly scheduled, 1 none available, 2 phone agency, 3 coordinate with driver
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
# GTFS stop_times.txt timepoint: 0 = approximate times, 1 = exact times
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1

# Default uncertainty (seconds) attached to GTFS-RT arrival/departure events.
# NOTE: the name keeps the historical misspelling of "uncertainty";
# renaming would break importers elsewhere in the package.
MFDZ_DEFAULT_UNCERTAINITY = 600
|
||||
230
amarillo_gtfs_generator/gtfs_export.py
Normal file
230
amarillo_gtfs_generator/gtfs_export.py
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
|
||||
from collections.abc import Iterable
|
||||
from datetime import datetime, timedelta
|
||||
from zipfile import ZipFile
|
||||
import csv
|
||||
import gettext
|
||||
import logging
|
||||
import re
|
||||
|
||||
from amarillo.utils.utils import assert_folder_exists
|
||||
from .models.gtfs import GtfsTimeDelta, GtfsFeedInfo, GtfsAgency, GtfsRoute, GtfsStop, GtfsStopTime, GtfsTrip, GtfsCalendar, GtfsCalendarDate, GtfsShape
|
||||
from amarillo_stops.stops import is_carpooling_stop
|
||||
from .gtfs_constants import *
|
||||
from .models.Carpool import Agency
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class GtfsExport:
    """Exports the trips of a ride store as a static GTFS zip file.

    All well-known stops from the stop store are converted to GTFS stops,
    then every trip (optionally restricted to a bounding box) is converted
    into route/trip/calendar/stop_time/shape records and written as CSV
    files which are finally zipped.
    """

    # NOTE(review): counters and stored_stops used to be *class* attributes.
    # stored_stops was mutated in place (self.stored_stops[...] = ...), so the
    # dict was shared by every GtfsExport instance and stops leaked from one
    # export into the next. All of them are now instance state set in __init__.

    def __init__(self, agencies: "dict[str, Agency]", feed_info, ridestore, stopstore, bbox=None):
        # string annotation: Agency is only needed for type checking and the
        # eager form would raise NameError when this module-level name is
        # imported in isolation
        self.stops_counter = 0
        self.trips_counter = 0
        self.routes_counter = 0
        self.stored_stops = {}   # all known stops by stop_id (export candidates)
        self.stops = {}          # stops actually exported (referenced or always-export)
        self.routes = []
        self.calendar_dates = []
        self.calendar = []       # (the original assigned this twice; once suffices)
        self.trips = []
        self.stop_times = []
        self.shapes = []
        self.agencies = [GtfsAgency(a.id, a.name, a.url, a.timezone, a.lang, a.email) for a in agencies.values()]
        self.feed_info = feed_info
        self.localized_to = " nach "
        self.localized_short_name = "Mitfahrgelegenheit"
        self.stopstore = stopstore
        self.ridestore = ridestore
        self.bbox = bbox

    def export(self, gtfszip_filename, gtfsfolder):
        """Write all GTFS txt files into gtfsfolder and zip them into
        gtfszip_filename."""
        assert_folder_exists(gtfsfolder)
        self._prepare_gtfs_feed(self.ridestore, self.stopstore)
        self._write_csvfile(gtfsfolder, 'agency.txt', self.agencies)
        self._write_csvfile(gtfsfolder, 'feed_info.txt', self.feed_info)
        self._write_csvfile(gtfsfolder, 'routes.txt', self.routes)
        self._write_csvfile(gtfsfolder, 'trips.txt', self.trips)
        self._write_csvfile(gtfsfolder, 'calendar.txt', self.calendar)
        self._write_csvfile(gtfsfolder, 'calendar_dates.txt', self.calendar_dates)
        self._write_csvfile(gtfsfolder, 'stops.txt', self.stops.values())
        self._write_csvfile(gtfsfolder, 'stop_times.txt', self.stop_times)
        self._write_csvfile(gtfsfolder, 'shapes.txt', self.shapes)
        self._zip_files(gtfszip_filename, gtfsfolder)

    def _zip_files(self, gtfszip_filename, gtfsfolder):
        # Bundle the previously written txt files into one GTFS zip.
        gtfsfiles = ['agency.txt', 'feed_info.txt', 'routes.txt', 'trips.txt',
                     'calendar.txt', 'calendar_dates.txt', 'stops.txt', 'stop_times.txt', 'shapes.txt']
        with ZipFile(gtfszip_filename, 'w') as gtfszip:
            for gtfsfile in gtfsfiles:
                gtfszip.write(gtfsfolder + '/' + gtfsfile, gtfsfile)

    def _prepare_gtfs_feed(self, ridestore, stopstore):
        """
        Prepares all gtfs objects in memory before they are written
        to their respective streams.

        For all wellknown stops a GTFS stop is created and
        afterwards all ride offers are transformed into their
        gtfs equivalents.
        """
        for stopSet in stopstore.stopsDataFrames:
            for stop in stopSet["stops"].itertuples():
                self._load_stored_stop(stop)
        # copy: the watchdog thread may mutate ridestore.trips while we iterate
        cloned_trips = dict(ridestore.trips)
        for _, trip in cloned_trips.items():
            if self.bbox is None or trip.intersects(self.bbox):
                self._convert_trip(trip)

    def _convert_trip(self, trip):
        # One route per trip; routes_counter doubles as the shape_id.
        self.routes_counter += 1
        self.routes.append(self._create_route(trip))
        self.calendar.append(self._create_calendar(trip))
        if not trip.runs_regularly:
            self.calendar_dates.append(self._create_calendar_date(trip))
        self.trips.append(self._create_trip(trip, self.routes_counter))
        self._append_stops_and_stop_times(trip)
        self._append_shapes(trip, self.routes_counter)

    def _trip_headsign(self, destination):
        """Derive a short headsign from a free-form destination string:
        strips country suffixes (keeping ', Schweiz' as an appendix),
        drops leading street/zip-code parts and a dangling ')'."""
        destination = destination.replace('(Deutschland)', '')
        destination = destination.replace(', Deutschland', '')
        appendix = ''
        if 'Schweiz' in destination or 'Switzerland' in destination:
            appendix = ', Schweiz'
            destination = destination.replace('(Schweiz)', '')
            destination = destination.replace(', Schweiz', '')
            destination = destination.replace('(Switzerland)', '')

        try:
            # optional "<street>," prefix, optional 4-5 digit zip, then the city
            matches = re.match(r"(.*,)? ?(\d{4,5})? ?(.*)", destination)

            match = matches.group(3).strip() if matches is not None else destination.strip()
            if match[-1] == ')' and not '(' in match:
                match = match[0:-1]

            return match + appendix
        except Exception as ex:
            # e.g. IndexError for an empty match — fall back to the raw string
            logger.error("error for " + destination)
            logger.exception(ex)
            return destination

    def _create_route(self, trip):
        return GtfsRoute(trip.agency, trip.trip_id, trip.route_long_name(), RIDESHARING_ROUTE_TYPE, trip.url, "", trip.route_color, trip.route_text_color)

    def _create_calendar(self, trip):
        # TODO currently, calendar is not provided by Fahrgemeinschaft.de interface.
        # We could apply some heuristics like requesting multiple days and extrapolate
        # if multiple trips are found, but better would be to have these provided by the
        # offical interface. Then validity periods should be provided as well (not
        # sure if these are available)
        # For fahrgemeinschaft.de, regurlar trips are recognizable via their url
        # which contains "regelmaessig". However, we don't know on which days of the week,
        # nor until when. As a first guess, if datetime is a mo-fr, we assume each workday,
        # if it's sa/su, only this...

        feed_start_date = datetime.today()
        stop_date = self._convert_stop_date(feed_start_date)
        # validity window: today + 31 days
        return GtfsCalendar(trip.trip_id, stop_date, self._convert_stop_date(feed_start_date + timedelta(days=31)), *(trip.weekdays))

    def _create_calendar_date(self, trip):
        # single-date service for one-off (non-regular) trips
        return GtfsCalendarDate(trip.trip_id, self._convert_stop_date(trip.start), CALENDAR_DATES_EXCEPTION_TYPE_ADDED)

    def _create_trip(self, trip, shape_id):
        return GtfsTrip(trip.trip_id, trip.trip_id, trip.trip_id, shape_id, trip.trip_headsign, NO_BIKES_ALLOWED)

    def _convert_stop(self, stop):
        """
        Converts a stop represented as pandas row to a gtfs stop.
        Expected attributes of stop: id, stop_name, x, y (in wgs84)
        """
        if stop.id:
            id = stop.id
        else:
            # synthesize an id for stops without one
            self.stops_counter += 1
            id = "tmp-{}".format(self.stops_counter)

        stop_name = "k.A." if stop.stop_name is None else stop.stop_name
        return GtfsStop(id, stop.y, stop.x, stop_name)

    def _append_stops_and_stop_times(self, trip):
        # Assumptions:
        # arrival_time = departure_time
        # pickup_type, drop_off_type for origin: = coordinate/none
        # pickup_type, drop_off_type for destination: = none/coordinate
        # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners)
        for stop_time in trip.stop_times:
            # retrieve stop from stored_stops and mark it to be exported
            wkn_stop = self.stored_stops.get(stop_time.stop_id)
            if not wkn_stop:
                logger.warning("No stop found in stop_store for %s. Will skip stop_time %s of trip %s", stop_time.stop_id, stop_time.stop_sequence, trip.trip_id)
            else:
                self.stops[stop_time.stop_id] = wkn_stop
                # Append stop_time
                self.stop_times.append(stop_time)

    def _append_shapes(self, trip, shape_id):
        # One shape point (with running sequence number) per path coordinate.
        counter = 0
        for point in trip.path.coordinates:
            counter += 1
            self.shapes.append(GtfsShape(shape_id, point[0], point[1], counter))

    def _stop_hash(self, stop):
        # name#lon#lat — identifies a stop by name and coordinates
        return "{}#{}#{}".format(stop.stop_name, stop.x, stop.y)

    def _should_always_export(self, stop):
        """
        Returns true, if the given stop shall be exported to GTFS,
        regardless, if it's part of a trip or not.

        This is necessary, as potential stops are required
        to be part of the GTFS to be referenced later on
        by dynamicly added trips.
        """
        if self.bbox:
            return (self.bbox[0] <= stop.stop_lon <= self.bbox[2] and
                    self.bbox[1] <= stop.stop_lat <= self.bbox[3])
        else:
            return is_carpooling_stop(stop.stop_id, stop.stop_name)

    def _load_stored_stop(self, stop):
        # Register a well-known stop; export it unconditionally when it lies
        # in the bbox / is a carpooling stop. (Removed an unused stop_hash
        # local the original computed here.)
        gtfsstop = self._convert_stop(stop)
        self.stored_stops[gtfsstop.stop_id] = gtfsstop
        if self._should_always_export(gtfsstop):
            self.stops[gtfsstop.stop_id] = gtfsstop

    def _get_stop_by_hash(self, stop_hash):
        # exported stops take precedence over merely stored ones
        return self.stops.get(stop_hash, self.stored_stops.get(stop_hash))

    def _get_or_create_stop(self, stop):
        stop_hash = self._stop_hash(stop)
        gtfsstop = self.stops.get(stop_hash)
        if gtfsstop is None:
            gtfsstop = self.stored_stops.get(stop_hash, self._convert_stop(stop))
            self.stops[stop_hash] = gtfsstop
        return gtfsstop

    def _convert_stop_date(self, date_time):
        # GTFS date format
        return date_time.strftime("%Y%m%d")

    def _write_csvfile(self, gtfsfolder, filename, content):
        with open(gtfsfolder + "/" + filename, 'w', newline="\n", encoding="utf-8") as csvfile:
            self._write_csv(csvfile, content)

    def _write_csv(self, csvfile, content):
        # content is either a single namedtuple (e.g. feed_info) or an
        # iterable of namedtuples; field names become the CSV header
        if hasattr(content, '_fields'):
            writer = csv.DictWriter(csvfile, content._fields)
            writer.writeheader()
            writer.writerow(content._asdict())
        else:
            if content:
                writer = csv.DictWriter(csvfile, next(iter(content))._fields)
                writer.writeheader()
                for record in content:
                    writer.writerow(record._asdict())
|
||||
|
||||
|
||||
279
amarillo_gtfs_generator/gtfs_generator.py
Normal file
279
amarillo_gtfs_generator/gtfs_generator.py
Normal file
|
|
@ -0,0 +1,279 @@
|
|||
from fastapi import FastAPI, Body, HTTPException, status
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
from .gtfs_export import GtfsExport, GtfsFeedInfo, GtfsAgency
|
||||
from .gtfs import GtfsRtProducer
|
||||
from amarillo.utils.container import container
|
||||
# from amarillo.plugins.gtfs_export.router import router
|
||||
# from amarillo.plugins.enhancer.configuration import configure_enhancer_services
|
||||
from glob import glob
|
||||
import json
|
||||
import schedule
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
import os
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
from .models.Carpool import Carpool, Region
|
||||
from .router import _assert_region_exists
|
||||
from amarillo_stops import stops
|
||||
from .services.trips import TripStore, Trip
|
||||
from .services.carpools import CarpoolService
|
||||
from .services.agencies import AgencyService
|
||||
from .services.regions import RegionService
|
||||
from amarillo.utils.utils import agency_carpool_ids_from_filename
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class EventHandler(FileSystemEventHandler):
    """Watchdog handler that keeps the carpool service in sync with the
    data/enhanced folder: files closed after writing are loaded as carpools,
    deleted files remove the corresponding carpool."""

    def on_closed(self, event):
        # A file was closed after writing — (re)load it as a carpool.
        logger.info("CLOSE_WRITE: Created %s", event.src_path)
        try:
            with open(event.src_path, 'r', encoding='utf-8') as f:
                # renamed from `dict`, which shadowed the builtin
                carpool_dict = json.load(f)
                carpool = Carpool(**carpool_dict)

            container['carpools'].put(carpool.agency, carpool.id, carpool)
        except FileNotFoundError:
            # file vanished between the event and our read
            logger.error("Carpool could not be added, as already deleted (%s)", event.src_path)
        except Exception:
            # was a bare `except:`, which would also swallow SystemExit /
            # KeyboardInterrupt; narrowed to Exception
            logger.exception("Eventhandler on_closed encountered exception")

    def on_deleted(self, event):
        # A carpool file was removed — drop it from the service.
        try:
            logger.info("DELETE: Removing %s", event.src_path)
            (agency_id, carpool_id) = agency_carpool_ids_from_filename(event.src_path)
            container['carpools'].delete(agency_id, carpool_id)
        except Exception:
            # was a bare `except:`; narrowed to Exception
            logger.exception("Eventhandler on_deleted encountered exception")
|
||||
|
||||
|
||||
|
||||
def init():
    """Start-up wiring of the generator service.

    Loads agencies, regions and stop sources, restores previously enhanced
    carpools from disk, starts a watchdog observer on data/enhanced and the
    midnight schedule, and finally runs an initial GTFS generation.
    Expects to be run with a `data/` directory in the current working dir.
    """
    logger.info(f"Current working directory is {os.path.abspath(os.getcwd())}")
    # only logged, not fatal — later steps will fail loudly if data is missing
    if not os.path.isdir('data/agency'):
        logger.error(f'{os.path.abspath("data/agency")} directory does not exist')

    container['agencies'] = AgencyService()
    logger.info("Loaded %d agencies", len(container['agencies'].agencies))

    container['regions'] = RegionService()
    logger.info("Loaded %d regions", len(container['regions'].regions))

    logger.info("Load stops...")
    with open('data/stop_sources.json') as stop_sources_file:
        stop_sources = json.load(stop_sources_file)
        stop_store = stops.StopsStore(stop_sources)

    stop_store.load_stop_sources()
    # TODO: do we need container?
    container['stops_store'] = stop_store
    container['trips_store'] = TripStore(stop_store)

    # TODO: the carpool service may be obsolete
    container['carpools'] = CarpoolService(container['trips_store'])

    logger.info("Restore carpools...")

    # Re-read every enhanced carpool file written in earlier runs; a single
    # broken file is logged and skipped, it does not abort startup.
    for agency_id in container['agencies'].agencies:
        for carpool_file_name in glob(f'data/enhanced/{agency_id}/*.json'):
            try:
                with open(carpool_file_name) as carpool_file:
                    carpool = Carpool(**(json.load(carpool_file)))
                    #TODO: convert to trip and add to tripstore directly
                    container['carpools'].put(carpool.agency, carpool.id, carpool)
                logger.info(f"Restored carpool {carpool_file_name}")
            except Exception as e:
                logger.warning("Issue during restore of carpool %s: %s", carpool_file_name, repr(e))

    observer = Observer() # Watch Manager

    # react to carpool files being written/deleted from now on
    observer.schedule(EventHandler(), 'data/enhanced', recursive=True)
    observer.start()
    start_schedule()

    # initial feed generation so consumers have data right after startup
    generate_gtfs()
|
||||
|
||||
|
||||
def run_schedule():
    """Endless scheduler loop: run due jobs, once per second.

    Exceptions raised by a job are logged and the loop continues.
    """
    while True:
        try:
            schedule.run_pending()
        except Exception as err:
            logger.exception(err)
        time.sleep(1)
|
||||
|
||||
def midnight():
    """Daily maintenance: refresh stops, age out trips/offers, re-export GTFS."""
    stops_store = container['stops_store']
    trips_store = container['trips_store']
    carpools = container['carpools']

    stops_store.load_stop_sources()
    trips_store.unflag_unrecent_updates()
    carpools.purge_outdated_offers()

    generate_gtfs()
|
||||
|
||||
#TODO: generate for a specific region only
|
||||
#TODO: what happens when there are no trips?
|
||||
def generate_gtfs():
    """Export one GTFS zip per configured region into ``data/gtfs``."""
    logger.info("Generate GTFS")

    # TODO make feed producer infos configurable
    # Hoisted out of the loop: the same publisher info applies to every region.
    feed_info = GtfsFeedInfo('mfdz', 'MITFAHR|DE|ZENTRALE', 'http://www.mitfahrdezentrale.de', 'de', 1)
    for region in container['regions'].regions.values():
        exporter = GtfsExport(
            container['agencies'].agencies,
            feed_info,
            container['trips_store'],
            container['stops_store'],
            region.bbox)
        exporter.export(f"data/gtfs/amarillo.{region.id}.gtfs.zip", "data/tmp/")
|
||||
|
||||
def generate_gtfs_rt():
    """Export one GTFS-RT feed per configured region into ``data/gtfs``."""
    logger.info("Generate GTFS-RT")
    producer = GtfsRtProducer(container['trips_store'])
    for region in container['regions'].regions.values():
        # export_feed writes the feed files itself; the return value was
        # previously bound to an unused local and is simply discarded now.
        producer.export_feed(time.time(), f"data/gtfs/amarillo.{region.id}.gtfsrt", bbox=region.bbox)
|
||||
|
||||
def start_schedule():
    """Register the daily maintenance job and run the scheduler loop in a daemon thread."""
    schedule.every().day.at("00:00").do(midnight)
    # schedule.every(60).seconds.do(generate_gtfs_rt)
    # Create all feeds once at startup
    # schedule.run_all()
    worker = threading.Thread(target=run_schedule, daemon=True)
    worker.start()
|
||||
|
||||
|
||||
# Configure logging from logging.conf; keep loggers created before this call enabled.
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("gtfs-generator")
|
||||
|
||||
#TODO: clean up metadata
|
||||
# FastAPI application with OpenAPI metadata (title, contact, license, servers).
app = FastAPI(title="Amarillo GTFS Generator",
              description="This service allows carpool agencies to publish "
                          "their trip offers, so routing services may suggest "
                          "them as trip options. For carpool offers, only the "
                          "minimum required information (origin/destination, "
                          "optionally intermediate stops, departure time and a "
                          "deep link for booking/contacting the driver) needs to "
                          "be published, booking/contact exchange is to be "
                          "handled by the publishing agency.",
              version="0.0.1",
              # TODO 404
              terms_of_service="http://mfdz.de/carpool-hub-terms/",
              contact={
                  # "name": "unused",
                  # "url": "http://unused",
                  "email": "info@mfdz.de",
              },
              license_info={
                  "name": "AGPL-3.0 License",
                  "url": "https://www.gnu.org/licenses/agpl-3.0.de.html",
              },
              openapi_tags=[
                  {
                      "name": "carpool",
                      # "description": "Find out more about Amarillo - the carpooling intermediary",
                      "externalDocs": {
                          "description": "Find out more about Amarillo - the carpooling intermediary",
                          "url": "https://github.com/mfdz/amarillo",
                      },
                  }],
              servers=[
                  {
                      "description": "MobiData BW Amarillo service",
                      "url": "https://amarillo.mobidata-bw.de"
                  },
                  {
                      "description": "DABB bbnavi Amarillo service",
                      "url": "https://amarillo.bbnavi.de"
                  },
                  {
                      "description": "Demo server by MFDZ",
                      "url": "https://amarillo.mfdz.de"
                  },
                  {
                      "description": "Dev server for development",
                      "url": "https://amarillo-dev.mfdz.de"
                  },
                  {
                      "description": "Server for Mitanand project",
                      "url": "https://mitanand.mfdz.de"
                  },
                  {
                      "description": "Localhost for development",
                      "url": "http://localhost:8000"
                  }
              ],
              redoc_url=None
              )

# Module-level start-up: load data, start watcher and scheduler, export GTFS.
init()
|
||||
|
||||
# @app.post("/",
|
||||
# operation_id="enhancecarpool",
|
||||
# summary="Add a new or update existing carpool",
|
||||
# description="Carpool object to be enhanced",
|
||||
# responses={
|
||||
# status.HTTP_404_NOT_FOUND: {
|
||||
# "description": "Agency does not exist"},
|
||||
|
||||
# })
|
||||
#TODO: add examples
|
||||
# async def post_carpool(carpool: Carpool = Body(...)):
|
||||
|
||||
# logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
|
||||
|
||||
# trips_store: TripStore = container['trips_store']
|
||||
# trip = trips_store._load_as_trip(carpool)
|
||||
|
||||
#TODO: carpool deleted endpoint
|
||||
|
||||
#TODO: gtfs, gtfs-rt endpoints
|
||||
|
||||
@app.get("/region/{region_id}/gtfs",
|
||||
summary="Return GTFS Feed for this region",
|
||||
response_description="GTFS-Feed (zip-file)",
|
||||
response_class=FileResponse,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
}
|
||||
)
|
||||
async def get_file(region_id: str):
|
||||
_assert_region_exists(region_id)
|
||||
generate_gtfs()
|
||||
# verify_permission("gtfs", requesting_user)
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
|
||||
|
||||
@app.get("/region/{region_id}/gtfs-rt/",
|
||||
summary="Return GTFS-RT Feed for this region",
|
||||
response_description="GTFS-RT-Feed",
|
||||
response_class=FileResponse,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
|
||||
}
|
||||
)
|
||||
async def get_file(region_id: str, format: str = 'protobuf'):
|
||||
generate_gtfs_rt()
|
||||
_assert_region_exists(region_id)
|
||||
if format == 'json':
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
|
||||
elif format == 'protobuf':
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
|
||||
else:
|
||||
message = "Specified format is not supported, i.e. neither protobuf nor json."
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
#TODO: sync endpoint that calls midnight
|
||||
|
||||
@app.post("/sync",
|
||||
operation_id="sync")
|
||||
#TODO: add examples
|
||||
async def post_sync():
|
||||
|
||||
logger.info(f"Sync")
|
||||
|
||||
midnight()
|
||||
0
amarillo_gtfs_generator/gtfsrt/__init__.py
Normal file
0
amarillo_gtfs_generator/gtfsrt/__init__.py
Normal file
80
amarillo_gtfs_generator/gtfsrt/gtfs_realtime_pb2.py
Normal file
80
amarillo_gtfs_generator/gtfsrt/gtfs_realtime_pb2.py
Normal file
File diff suppressed because one or more lines are too long
33
amarillo_gtfs_generator/gtfsrt/realtime_extension_pb2.py
Normal file
33
amarillo_gtfs_generator/gtfsrt/realtime_extension_pb2.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from . import gtfs_realtime_pb2 as gtfs__realtime__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')

# NOTE(review): `trip_descriptor` and `stop_time_properties` used below are
# injected into this module's globals() by the builder calls — they are not
# undefined names.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
  gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)
|
||||
407
amarillo_gtfs_generator/models/Carpool.py
Normal file
407
amarillo_gtfs_generator/models/Carpool.py
Normal file
|
|
@ -0,0 +1,407 @@
|
|||
from datetime import time, date, datetime
|
||||
from pydantic import ConfigDict, BaseModel, Field, HttpUrl, EmailStr
|
||||
from typing import List, Union, Set, Optional, Tuple
|
||||
from datetime import time
|
||||
from pydantic import BaseModel, Field
|
||||
from geojson_pydantic.geometries import LineString
|
||||
from enum import Enum, IntEnum
|
||||
|
||||
# Numeric type used for coordinates in bounding-box tuples.
NumType = Union[float, int]

# Upper bound for the number of stops accepted in a single carpool trip.
MAX_STOPS_PER_TRIP = 100
|
||||
|
||||
class Weekday(str, Enum):
    """Weekday names used to describe recurring carpool departure dates."""
    monday = "monday"
    tuesday = "tuesday"
    wednesday = "wednesday"
    thursday = "thursday"
    friday = "friday"
    saturday = "saturday"
    sunday = "sunday"
|
||||
|
||||
class PickupDropoffType(str, Enum):
    """Whether passengers may board, alight or both at a stop."""
    pickup_and_dropoff = "pickup_and_dropoff"
    only_pickup = "only_pickup"
    only_dropoff = "only_dropoff"
|
||||
|
||||
class YesNoEnum(IntEnum):
    """Yes/no flag as encoded in RidesharingInfo fields (1 = yes, 2 = no)."""
    yes = 1
    no = 2
|
||||
|
||||
class LuggageSize(IntEnum):
    """Luggage size category (1 = small, 2 = medium, 3 = large)."""
    small = 1
    medium = 2
    large = 3
|
||||
|
||||
class StopTime(BaseModel):
    """A single stop of a carpool trip: location, optional id and times."""
    id: Optional[str] = Field(
        None,
        description="Optional Stop ID. If given, it should conform to the "
                    "IFOPT specification. For official transit stops, "
                    "it should be their official IFOPT. In Germany, this is "
                    "the DHID which is available via the 'zentrales "
                    "Haltestellenverzeichnis (zHV)', published by DELFI e.V. "
                    "Note, that currently carpooling location.",
        pattern=r"^([a-zA-Z]{2,6}):\d+:\d+(:\d*(:\w+)?)?$|^osm:[nwr]\d+$",
        examples=["de:12073:900340137::2"])

    name: str = Field(
        description="Name of the location. Use a name that people will "
                    "understand in the local and tourist vernacular.",
        min_length=1,
        max_length=256,
        examples=["Angermünde, Breitscheidstr."])

    departureTime: Optional[str] = Field(
        None,
        description="Departure time from a specific stop for a specific "
                    "carpool trip. For times occurring after midnight on the "
                    "service day, the time is given as a value greater than "
                    "24:00:00 in HH:MM:SS local time for the day on which the "
                    "trip schedule begins. If there are not separate times for "
                    "arrival and departure at a stop, the same value for arrivalTime "
                    "and departureTime. Note, that arrivalTime/departureTime of "
                    "the stops are not mandatory, and might then be estimated by "
                    "this service.",
        pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
        examples=["17:00"]
    )

    arrivalTime: Optional[str] = Field(
        None,
        description="Arrival time at a specific stop for a specific trip on a "
                    "carpool route. If there are not separate times for arrival "
                    "and departure at a stop, enter the same value for arrivalTime "
                    "and departureTime. For times occurring after midnight on the "
                    "service day, the time as a value greater than 24:00:00 in "
                    "HH:MM:SS local time for the day on which the trip schedule "
                    "begins. Note, that arrivalTime/departureTime of the stops "
                    "are not mandatory, and might then be estimated by this "
                    "service.",
        pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
        examples=["18:00"])

    lat: float = Field(
        description="Latitude of the location. Should describe the location "
                    "where a passenger may mount/dismount the vehicle.",
        ge=-90,
        lt=90,
        examples=["53.0137311391"])

    lon: float = Field(
        description="Longitude of the location. Should describe the location "
                    "where a passenger may mount/dismount the vehicle.",
        ge=-180,
        lt=180,
        examples=["13.9934706687"])

    pickup_dropoff: Optional[PickupDropoffType] = Field(
        None, description="If passengers may be picked up, dropped off or both at this stop. "
                          "If not specified, this service may assign this according to some custom rules. "
                          "E.g. Amarillo may allow pickup only for the first third of the distance travelled, "
                          "and dropoff only for the last third.",
        examples=["only_pickup"]
    )
    model_config = ConfigDict(json_schema_extra={
        "example": "{'id': 'de:12073:900340137::2', 'name': "
                   "'Angermünde, Breitscheidstr.', 'lat': 53.0137311391, "
                   "'lon': 13.9934706687}"
    })
|
||||
|
||||
class Region(BaseModel):
    """Geographic region for which dedicated GTFS/GTFS-RT feeds are produced."""
    id: str = Field(
        description="ID of the region.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["bb"])

    bbox: Tuple[NumType, NumType, NumType, NumType] = Field(
        description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]",
        examples=[[10.5,49.2,11.3,51.3]])
|
||||
|
||||
class RidesharingInfo(BaseModel):
    """Optional ridesharing attributes of an offer (GRFS extension fields)."""
    number_free_seats: int = Field(
        description="Number of free seats",
        ge=0,
        examples=[3])

    same_gender: Optional[YesNoEnum] = Field(
        None,
        description="Trip only for same gender:"
                    "1: Yes"
                    "2: No",
        examples=[1])
    luggage_size: Optional[LuggageSize] = Field(
        None,
        description="Size of the luggage:"
                    "1: small"
                    "2: medium"
                    "3: large",
        examples=[3])
    animal_car: Optional[YesNoEnum] = Field(
        None,
        description="Animals in Car allowed:"
                    "1: Yes"
                    "2: No",
        examples=[2])

    car_model: Optional[str] = Field(
        None,
        description="Car model",
        min_length=1,
        max_length=48,
        examples=["Golf"])
    car_brand: Optional[str] = Field(
        None,
        description="Car brand",
        min_length=1,
        max_length=48,
        examples=["VW"])

    creation_date: datetime = Field(
        description="Date when trip was created",
        examples=["2022-02-13T20:20:39+00:00"])

    smoking: Optional[YesNoEnum] = Field(
        None,
        description="Smoking allowed:"
                    "1: Yes"
                    "2: No",
        examples=[2])

    payment_method: Optional[str] = Field(
        None,
        description="Method of payment",
        min_length=1,
        max_length=48)
|
||||
|
||||
class Driver(BaseModel):
    """Optional information about the driver of a carpool offer."""
    driver_id: Optional[str] = Field(
        None,
        description="Identifies the driver.",
        min_length=1,
        max_length=256,
        pattern='^[a-zA-Z0-9_-]+$',
        examples=["789"])
    profile_picture: Optional[HttpUrl] = Field(
        None,
        description="URL that contains the profile picture",
        examples=["https://mfdz.de/driver/789/picture"])
    rating: Optional[int] = Field(
        None,
        description="Rating of the driver from 1 to 5."
                    "0 no rating yet",
        ge=0,
        le=5,
        examples=[5])
|
||||
|
||||
class Agency(BaseModel):
    """Carpool agency publishing offers through this service."""
    id: str = Field(
        description="ID of the agency.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])

    name: str = Field(
        description="Name",
        min_length=1,
        max_length=48,
        pattern=r'^[\w -\.\|]+$',
        examples=["MITFAHR|DE|ZENTRALE"])

    url: HttpUrl = Field(
        description="URL of the carpool agency.",
        examples=["https://mfdz.de/"])

    timezone: str = Field(
        description="Timezone where the carpool agency is located.",
        min_length=1,
        max_length=48,
        pattern=r'^[\w/]+$',
        examples=["Europe/Berlin"])

    lang: str = Field(
        description="Primary language used by this carpool agency.",
        min_length=1,
        max_length=2,
        pattern=r'^[a-zA-Z_]+$',
        examples=["de"])

    email: EmailStr = Field(
        description="""Email address actively monitored by the agency’s
        customer service department. This email address should be a direct
        contact point where carpool riders can reach a customer service
        representative at the agency.""",
        examples=["info@mfdz.de"])

    terms_url: Optional[HttpUrl] = Field(
        None, description="""A fully qualified URL pointing to the terms of service
        (also often called "terms of use" or "terms and conditions")
        for the service.""",
        examples=["https://mfdz.de/nutzungsbedingungen"])

    privacy_url: Optional[HttpUrl] = Field(
        None, description="""A fully qualified URL pointing to the privacy policy for the service.""",
        examples=["https://mfdz.de/datenschutz"])
    model_config = ConfigDict(json_schema_extra={
        "title": "Agency",
        "description": "Carpool agency.",
        "example":
        #"""
        {
            "id": "mfdz",
            "name": "MITFAHR|DE|ZENTRALE",
            "url": "http://mfdz.de",
            "timezone": "Europe/Berlin",
            "lang": "de",
            "email": "info@mfdz.de",
            "terms_url": "https://mfdz.de/nutzungsbedingungen",
            "privacy_url": "https://mfdz.de/datenschutz",
        }
        #"""
    })
|
||||
|
||||
class Carpool(BaseModel):
    """A single carpool offer as published by an agency (GRFS-style)."""
    id: str = Field(
        description="ID of the carpool. Should be supplied and managed by the "
                    "carpooling platform which originally published this "
                    "offer.",
        min_length=1,
        max_length=256,
        pattern='^[a-zA-Z0-9_-]+$',
        examples=["103361"])

    agency: str = Field(
        description="Short one string name of the agency, used as a namespace "
                    "for ids.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])

    driver: Optional[Driver] = Field(
        None,
        description="Driver data",
        examples=["""
            {
                "driver_id": "123",
                "profile_picture": "https://mfdz.de/driver/789/picture",
                "rating": 5
            }
        """])

    deeplink: HttpUrl = Field(
        description="Link to an information page providing detail information "
                    "for this offer, and, especially, an option to book the "
                    "trip/contact the driver.",
        examples=["https://mfdz.de/trip/103361"])

    stops: List[StopTime] = Field(
        ...,
        min_length=2,
        max_length=MAX_STOPS_PER_TRIP,
        description="Stops which this carpool passes by and offers to pick "
                    "up/drop off passengers. This list must at minimum "
                    "include two stops, the origin and destination of this "
                    "carpool trip. Note that for privacy reasons, the stops "
                    "usually should be official locations, like meeting "
                    "points, carpool parkings, ridesharing benches or "
                    "similar.",
        examples=["""[
            {
                "id": "03",
                "name": "drei",
                "lat": 45,
                "lon": 9
            },
            {
                "id": "03b",
                "name": "drei b",
                "lat": 45,
                "lon": 9
            }
        ]"""])

    # TODO can be removed, as first stop has departureTime as well
    departureTime: time = Field(
        description="Time when the carpool leaves at the first stop. Note, "
                    "that this API currently does not support flexible time "
                    "windows for departure, though drivers might be flexible."
                    "For recurring trips, the weekdays this trip will run. ",
        examples=["17:00"])

    # TODO think about using googlecal Format
    departureDate: Union[date, Set[Weekday]] = Field(
        description="Date when the trip will start, in case it is a one-time "
                    "trip. For recurring trips, specify weekdays. "
                    "Note, that when for different weekdays different "
                    "departureTimes apply, multiple carpool offers should be "
                    "published.",
        examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
                  '"sunday"]'])
    route_color: Optional[str] = Field(
        None,
        pattern='^([0-9A-Fa-f]{6})$',
        description="Route color designation that matches public facing material. "
                    "The color difference between route_color and route_text_color "
                    "should provide sufficient contrast when viewed on a black and "
                    "white screen.",
        examples=["0039A6"])
    route_text_color: Optional[str] = Field(
        None,
        pattern='^([0-9A-Fa-f]{6})$',
        description="Legible color to use for text drawn against a background of "
                    "route_color. The color difference between route_color and "
                    "route_text_color should provide sufficient contrast when "
                    "viewed on a black and white screen.",
        examples=["D4D2D2"])
    path: Optional[LineString] = Field(
        None, description="Optional route geometry as json LineString.")

    lastUpdated: Optional[datetime] = Field(
        None,
        description="LastUpdated should reflect the last time, the user "
                    "providing this offer, made an update or confirmed, "
                    "the offer is still valid. Note that this service might "
                    "purge outdated offers (e.g. older than 180 days). If not "
                    "passed, the service may assume 'now'",
        examples=["2022-02-13T20:20:39+00:00"])
    additional_ridesharing_info: Optional[RidesharingInfo] = Field(
        None,
        description="Extension of GRFS to the GTFS standard",
        examples=["""
            {
                "number_free_seats": 2,
                "creation_date": "2022-02-13T20:20:39+00:00",
                "same_gender": 2,
                "smoking": 1,
                "luggage_size": 3
            }
        """])
    model_config = ConfigDict(json_schema_extra={
        "title": "Carpool",
        # description ...
        "example":
        """
        {
            "id": "1234",
            "agency": "mfdz",
            "deeplink": "http://mfdz.de",
            "stops": [
                {
                    "id": "de:12073:900340137::2", "name": "ABC",
                    "lat": 45, "lon": 9
                },
                {
                    "id": "de:12073:900340137::3", "name": "XYZ",
                    "lat": 45, "lon": 9
                }
            ],
            "departureTime": "12:34",
            "departureDate": "2022-03-30",
            "lastUpdated": "2022-03-30T12:34:00+00:00"
        }
        """
    })
|
||||
0
amarillo_gtfs_generator/models/__init__.py
Normal file
0
amarillo_gtfs_generator/models/__init__.py
Normal file
30
amarillo_gtfs_generator/models/gtfs.py
Normal file
30
amarillo_gtfs_generator/models/gtfs.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
# TODO: move to enhancer
|
||||
from collections import namedtuple
|
||||
from datetime import timedelta
|
||||
|
||||
# Row records mirroring the columns of the corresponding GTFS txt files
# (feed_info, agency, routes, stops, stop_times, trips, calendar,
# calendar_dates, shapes).
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name route_color route_text_color')
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed')
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
|
||||
|
||||
# TODO Move to utils
|
||||
class GtfsTimeDelta(timedelta):
    """Timedelta rendered as a GTFS ``HH:MM:SS`` string (hours may exceed 24)."""

    def __str__(self):
        # divmod instead of repeated // and %; also avoids the previous
        # local variable that shadowed the builtin `str`.
        total = int(self.total_seconds())
        hours, remainder = divmod(total, 3600)
        minutes, seconds = divmod(remainder, 60)
        return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)

    def __add__(self, other):
        """Add another timedelta, preserving the GtfsTimeDelta subclass."""
        if isinstance(other, timedelta):
            return self.__class__(self.days + other.days,
                                  self.seconds + other.seconds,
                                  self.microseconds + other.microseconds)
        return NotImplemented
|
||||
68
amarillo_gtfs_generator/router.py
Normal file
68
amarillo_gtfs_generator/router.py
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Depends
|
||||
|
||||
from amarillo.models.Carpool import Region
|
||||
from amarillo.services.regions import RegionService
|
||||
# from amarillo.services.oauth2 import get_current_user, verify_permission
|
||||
# from amarillo.models.User import User
|
||||
from amarillo.utils.container import container
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# @router.post("/export")
|
||||
# async def trigger_export(requesting_user: User = Depends(get_current_user)):
|
||||
# verify_permission("generate-gtfs", requesting_user)
|
||||
# #import is here to avoid circular import
|
||||
# from amarillo.plugins.gtfs_export.gtfs_generator import generate_gtfs
|
||||
# generate_gtfs()
|
||||
|
||||
#TODO: move to amarillo/utils?
|
||||
def _assert_region_exists(region_id: str) -> Region:
    """Return the region for *region_id*, or raise a 404 HTTPException."""
    regions: RegionService = container['regions']
    region = regions.get_region(region_id)

    if region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)

    return region
|
||||
|
||||
|
||||
# @router.get("/region/{region_id}/gtfs",
|
||||
# summary="Return GTFS Feed for this region",
|
||||
# response_description="GTFS-Feed (zip-file)",
|
||||
# response_class=FileResponse,
|
||||
# responses={
|
||||
# status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
# }
|
||||
# )
|
||||
# async def get_file(region_id: str, requesting_user: User = Depends(get_current_user)):
|
||||
# verify_permission("gtfs", requesting_user)
|
||||
# _assert_region_exists(region_id)
|
||||
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
|
||||
|
||||
# @router.get("/region/{region_id}/gtfs-rt",
|
||||
# summary="Return GTFS-RT Feed for this region",
|
||||
# response_description="GTFS-RT-Feed",
|
||||
# response_class=FileResponse,
|
||||
# responses={
|
||||
# status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
# status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
|
||||
# }
|
||||
# )
|
||||
# async def get_file(region_id: str, format: str = 'protobuf', requesting_user: User = Depends(get_current_user)):
|
||||
# verify_permission("gtfs", requesting_user)
|
||||
# _assert_region_exists(region_id)
|
||||
# if format == 'json':
|
||||
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
|
||||
# elif format == 'protobuf':
|
||||
# return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
|
||||
# else:
|
||||
# message = "Specified format is not supported, i.e. neither protobuf nor json."
|
||||
# raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
0
amarillo_gtfs_generator/services/__init__.py
Normal file
0
amarillo_gtfs_generator/services/__init__.py
Normal file
24
amarillo_gtfs_generator/services/agencies.py
Normal file
24
amarillo_gtfs_generator/services/agencies.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
import json
|
||||
from glob import glob
|
||||
from typing import Dict
|
||||
|
||||
from amarillo.models.Carpool import Agency
|
||||
|
||||
# TODO FG HB this service should also listen to pyinotify
|
||||
# because the (updated) agencies are needed in the enhancer
|
||||
# as well.
|
||||
|
||||
class AgencyService:
    """Loads all agencies from ``data/agency/*.json`` and serves them by id."""

    def __init__(self):
        self.agencies: Dict[str, Agency] = {}
        for agency_file_name in glob('data/agency/*.json'):
            with open(agency_file_name) as agency_file:
                # `attrs` instead of shadowing the builtin `dict`.
                attrs = json.load(agency_file)
            agency = Agency(**attrs)
            self.agencies[agency.id] = agency

    def get_agency(self, agency_id: str) -> Agency:
        """Return the agency with *agency_id*, or None if unknown."""
        return self.agencies.get(agency_id)
|
||||
60
amarillo_gtfs_generator/services/carpools.py
Normal file
60
amarillo_gtfs_generator/services/carpools.py
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict
|
||||
from amarillo.models.Carpool import Carpool
|
||||
from amarillo.utils.utils import yesterday, is_older_than_days
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class CarpoolService():
    """In-memory registry of carpool offers, keyed by ``"{agency}:{carpool_id}"``.

    Accepted offers are mirrored into the associated trip store; outdated
    offers are removed from both.
    """

    # Offers whose last update is older than this are purged.
    MAX_OFFER_AGE_IN_DAYS = 180

    def __init__(self, trip_store):
        # trip_store receives put_carpool/delete_carpool mirror calls.
        self.trip_store = trip_store
        self.carpools: Dict[str, Carpool] = {}

    def is_outdated(self, carpool):
        """
        A carpool offer is outdated, if
        * it's completely in the past (if it's a single date offer).
          As we know the start time but not latest arrival, we deem
          offers starting the day before yesterday as outdated
        * its last update occurred before MAX_OFFER_AGE_IN_DAYS
        """
        # A set of weekdays means a recurring offer; a single date runs once.
        runs_once = not isinstance(carpool.departureDate, set)
        return (is_older_than_days(carpool.lastUpdated.date(), self.MAX_OFFER_AGE_IN_DAYS) or
                (runs_once and carpool.departureDate < yesterday()))

    def purge_outdated_offers(self):
        """
        Iterates over all carpools and deletes those which are outdated
        """
        # Copy the keys: delete() mutates self.carpools during iteration.
        for key in list(self.carpools.keys()):
            cp = self.carpools.get(key)
            if cp and self.is_outdated(cp):
                logger.info("Purge outdated offer %s", key)
                self.delete(cp.agency, cp.id)

    def get(self, agency_id: str, carpool_id: str):
        """Return the stored carpool, or None if unknown."""
        return self.carpools.get(f"{agency_id}:{carpool_id}")

    def get_all_ids(self):
        """Return all composite ``agency:carpool`` keys."""
        return list(self.carpools)

    def put(self, agency_id: str, carpool_id: str, carpool):
        """Insert or replace an offer; outdated offers are deleted instead."""
        self.carpools[f"{agency_id}:{carpool_id}"] = carpool
        # Outdated trips (which might have been in the store)
        # will be deleted
        if self.is_outdated(carpool):
            logger.info('Deleting outdated carpool %s:%s', agency_id, carpool_id)
            self.delete(agency_id, carpool_id)
        else:
            self.trip_store.put_carpool(carpool)

    def delete(self, agency_id: str, carpool_id: str):
        """Remove the offer (if present) and mirror the delete to the trip store."""
        # `key` instead of shadowing the builtin `id`.
        key = f"{agency_id}:{carpool_id}"
        if key in self.carpools:
            del self.carpools[key]
        self.trip_store.delete_carpool(agency_id, carpool_id)
|
||||
21
amarillo_gtfs_generator/services/regions.py
Normal file
21
amarillo_gtfs_generator/services/regions.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
import json
|
||||
from glob import glob
|
||||
from typing import Dict
|
||||
|
||||
from amarillo.models.Carpool import Region
|
||||
|
||||
|
||||
class RegionService:
    """Loads all region definitions from data/region/*.json at startup and
    serves lookups by region id."""

    def __init__(self):
        self.regions: Dict[str, Region] = {}
        for region_file_name in glob('data/region/*.json'):
            with open(region_file_name) as region_file:
                # Named 'region_dict' (not 'dict') to avoid shadowing the builtin.
                region_dict = json.load(region_file)
                region = Region(**region_dict)
                self.regions[region.id] = region

    def get_region(self, region_id: str) -> "Region":
        """Return the Region with the given id, or None if unknown."""
        return self.regions.get(region_id)
|
||||
381
amarillo_gtfs_generator/services/trips.py
Normal file
381
amarillo_gtfs_generator/services/trips.py
Normal file
|
|
@ -0,0 +1,381 @@
|
|||
from ..models.gtfs import GtfsTimeDelta, GtfsStopTime
|
||||
from ..models.Carpool import MAX_STOPS_PER_TRIP, Carpool, Weekday, StopTime, PickupDropoffType, Driver, RidesharingInfo
|
||||
# from amarillo.services.config import config
|
||||
from ..gtfs_constants import *
|
||||
# from amarillo.plugins.enhancer.services.routing import RoutingService, RoutingException
|
||||
from amarillo_stops.stops import is_carpooling_stop
|
||||
from amarillo.utils.utils import assert_folder_exists, is_older_than_days, yesterday, geodesic_distance_in_m
|
||||
from shapely.geometry import Point, LineString, box
|
||||
from geojson_pydantic.geometries import LineString as GeoJSONLineString
|
||||
from datetime import datetime, timedelta
|
||||
import numpy as np
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Trip:
    """One GTFS-ready trip derived from a carpool offer.

    `calendar` is either a set of Weekday values (a repeating offer) or a
    single date (an offer that runs exactly once).
    """

    def __init__(self, trip_id, route_name, headsign, url, calendar, departureTime, path, agency, lastUpdated, stop_times, driver: "Driver", additional_ridesharing_info: "RidesharingInfo", route_color, route_text_color, bbox):
        if isinstance(calendar, set):
            # Repeating offer: record a GTFS calendar-style weekday bitmap (Mon..Sun).
            self.runs_regularly = True
            self.weekdays = [
                1 if Weekday.monday in calendar else 0,
                1 if Weekday.tuesday in calendar else 0,
                1 if Weekday.wednesday in calendar else 0,
                1 if Weekday.thursday in calendar else 0,
                1 if Weekday.friday in calendar else 0,
                1 if Weekday.saturday in calendar else 0,
                1 if Weekday.sunday in calendar else 0,
            ]
            # (A previously computed start_in_day local was unused and has been removed.)
        else:
            # One-off offer: combine the single date with the departure time.
            self.start = datetime.combine(calendar, departureTime)
            self.runs_regularly = False
            self.weekdays = [0, 0, 0, 0, 0, 0, 0]

        self.start_time = departureTime
        self.path = path
        self.trip_id = trip_id
        self.url = url
        self.agency = agency
        self.stops = []
        self.lastUpdated = lastUpdated
        self.stop_times = stop_times
        self.driver = driver
        self.additional_ridesharing_info = additional_ridesharing_info
        self.route_color = route_color
        self.route_text_color = route_text_color
        self.bbox = bbox
        self.route_name = route_name
        self.trip_headsign = headsign

    def path_as_line_string(self):
        """Return the trip's path as given (GeoJSON-style LineString)."""
        return self.path

    def _total_seconds(self, instant):
        """Seconds since midnight for a time-like object with hour/minute/second."""
        return instant.hour * 3600 + instant.minute * 60 + instant.second

    def start_time_str(self):
        """Departure time formatted as HH:MM:SS (GTFS stop_times format)."""
        return self.start_time.strftime("%H:%M:%S")

    def next_trip_dates(self, start_date, day_count=14):
        """Yield the service dates (YYYYMMDD strings) within the next
        `day_count` days for regular trips, or the single start date."""
        if self.runs_regularly:
            for single_date in (start_date + timedelta(n) for n in range(day_count)):
                if self.weekdays[single_date.weekday()] == 1:
                    yield single_date.strftime("%Y%m%d")
        else:
            yield self.start.strftime("%Y%m%d")

    def route_long_name(self):
        """GTFS route_long_name for this trip."""
        return self.route_name

    def intersects(self, bbox):
        """True if this trip's bounding box intersects `bbox`
        (a (minx, miny, maxx, maxy) sequence, as accepted by shapely.box)."""
        return self.bbox.intersects(box(*bbox))
|
||||
|
||||
|
||||
class TripStore():
    """
    TripStore maintains the currently valid trips. A trip is a
    carpool offer enhanced with all stops this trip serves.

    Attributes:
        trips          Dict of currently valid trips.
        deleted_trips  Dict of recently deleted trips.
        recent_trips   Dict of trips added/updated within the last day.
    """

    def __init__(self, stops_store):
        self.transformer = TripTransformer(stops_store)
        self.stops_store = stops_store
        self.trips = {}
        self.deleted_trips = {}
        self.recent_trips = {}

    def put_carpool(self, carpool: "Carpool"):
        """
        Adds carpool to the TripStore.
        """
        return self._load_as_trip(carpool)
        # NOTE: the full enhancement pipeline below is currently disabled;
        # carpools are converted directly. Kept for reference until the
        # enhancer plugin split is finalized.
        # id = "{}:{}".format(carpool.agency, carpool.id)
        # filename = f'data/enhanced/{carpool.agency}/{carpool.id}.json'
        # try:
        #     existing_carpool = self._load_carpool_if_exists(carpool.agency, carpool.id)
        #     if existing_carpool and existing_carpool.lastUpdated == carpool.lastUpdated:
        #         enhanced_carpool = existing_carpool
        #     else:
        #         if len(carpool.stops) < 2 or self.distance_in_m(carpool) < 1000:
        #             logger.warning("Failed to add carpool %s:%s to TripStore, distance too low", carpool.agency, carpool.id)
        #             self.handle_failed_carpool_enhancement(carpool)
        #             return
        #         enhanced_carpool = self.transformer.enhance_carpool(carpool)
        #         # TODO should only store enhanced_carpool, if it has 2 or more stops
        #         assert_folder_exists(f'data/enhanced/{carpool.agency}/')
        #         with open(filename, 'w', encoding='utf-8') as f:
        #             f.write(enhanced_carpool.json())
        #         logger.info("Added enhanced carpool %s:%s", carpool.agency, carpool.id)
        #
        #     return self._load_as_trip(enhanced_carpool)
        # except RoutingException as err:
        #     logger.warning("Failed to add carpool %s:%s to TripStore due to RoutingException %s", carpool.agency, carpool.id, getattr(err, 'message', repr(err)))
        #     self.handle_failed_carpool_enhancement(carpool)
        # except Exception as err:
        #     logger.error("Failed to add carpool %s:%s to TripStore.", carpool.agency, carpool.id, exc_info=True)
        #     self.handle_failed_carpool_enhancement(carpool)

    # BUGFIX: first parameter was misspelled 'sellf'.
    def handle_failed_carpool_enhancement(self, carpool: "Carpool"):
        """Persist a carpool whose enhancement failed to data/failed/ for inspection."""
        assert_folder_exists(f'data/failed/{carpool.agency}/')
        with open(f'data/failed/{carpool.agency}/{carpool.id}.json', 'w', encoding='utf-8') as f:
            f.write(carpool.json())

    def distance_in_m(self, carpool):
        """Beeline distance in meters between the carpool's first and last
        stop; 0 if there are fewer than two stops."""
        if len(carpool.stops) < 2:
            return 0
        s1 = carpool.stops[0]
        s2 = carpool.stops[-1]
        return geodesic_distance_in_m((s1.lon, s1.lat), (s2.lon, s2.lat))

    def recently_added_trips(self):
        """Trips added/updated within the last day (for GTFS-RT updates)."""
        return list(self.recent_trips.values())

    def recently_deleted_trips(self):
        """Trips deleted recently (for GTFS-RT cancellations)."""
        return list(self.deleted_trips.values())

    def _load_carpool_if_exists(self, agency_id: str, carpool_id: str):
        """Restore a previously enhanced carpool from disk, or None."""
        if carpool_exists(agency_id, carpool_id, 'data/enhanced'):
            try:
                return load_carpool(agency_id, carpool_id, 'data/enhanced')
            except Exception as e:
                # An error on restore could be caused by model changes,
                # in such a case, it needs to be recreated
                logger.warning("Could not restore enhanced trip %s:%s, reason: %s", agency_id, carpool_id, repr(e))

        return None

    def _load_as_trip(self, carpool: "Carpool"):
        """Convert the carpool to a Trip and register it; trips updated within
        the last day are additionally flagged as recent."""
        trip = self.transformer.transform_to_trip(carpool)
        # Renamed from 'id' to avoid shadowing the builtin.
        trip_id = trip.trip_id
        self.trips[trip_id] = trip
        if not is_older_than_days(carpool.lastUpdated, 1):
            self.recent_trips[trip_id] = trip
            logger.debug("Added trip %s", trip_id)

        return trip

    def delete_carpool(self, agency_id: str, carpool_id: str):
        """
        Deletes carpool from the TripStore.
        """
        agencyScopedCarpoolId = f"{agency_id}:{carpool_id}"
        trip_to_be_deleted = self.trips.get(agencyScopedCarpoolId)
        if trip_to_be_deleted:
            self.deleted_trips[agencyScopedCarpoolId] = trip_to_be_deleted
            del self.trips[agencyScopedCarpoolId]

        if self.recent_trips.get(agencyScopedCarpoolId):
            del self.recent_trips[agencyScopedCarpoolId]

        if carpool_exists(agency_id, carpool_id):
            remove_carpool_file(agency_id, carpool_id)

        # BUGFIX: previously logged the builtin function `id` instead of the trip id.
        logger.debug("Deleted trip %s", agencyScopedCarpoolId)

    def unflag_unrecent_updates(self):
        """
        Trips that were last updated before yesterday, are not recent
        any longer. As no updates need to be sent for them any longer,
        they will be removed from recent_trips and deleted_trips.
        """
        cutoff = yesterday()
        for collection in (self.recent_trips, self.deleted_trips):
            for key in list(collection):
                trip = collection.get(key)
                if trip and trip.lastUpdated.date() < cutoff:
                    del collection[key]
|
||||
|
||||
|
||||
class TripTransformer:
    """Converts Carpool models into Trip objects, and (in the currently
    disabled enhancement path) routes the ride and adds nearby transit stops.

    NOTE(review): `enhance_carpool` / `_path_for_ride` rely on `self.router`,
    whose initialization is commented out below — calling them as-is raises
    AttributeError. Confirm before re-enabling the enhancement pipeline.
    """

    REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS = True
    # NOTE(review): "SERACH" is a typo for "SEARCH"; kept since the name is
    # part of the class's public surface.
    REPLACEMENT_STOPS_SERACH_RADIUS_IN_M = 1000
    # NOTE(review): enhance_carpool hard-codes 0.0001 instead of using this constant.
    SIMPLIFY_TOLERANCE = 0.0001

    # router = RoutingService(config.graphhopper_base_url)

    def __init__(self, stops_store):
        self.stops_store = stops_store

    def transform_to_trip(self, carpool : Carpool):
        """Build a Trip from an (already enhanced) carpool: derives the route
        name ("<first> nach <last>"), headsign, GTFS stop_times and the
        bounding box of the carpool's path."""
        stop_times = self._convert_stop_times(carpool)
        route_name = carpool.stops[0].name + " nach " + carpool.stops[-1].name
        headsign= carpool.stops[-1].name
        trip_id = self._trip_id(carpool)
        path = carpool.path
        # Bounding box over all path coordinates; pt[0]/pt[1] are presumably
        # lon/lat (GeoJSON order) — TODO confirm against the Carpool model.
        bbox = box(
            min([pt[0] for pt in path.coordinates]),
            min([pt[1] for pt in path.coordinates]),
            max([pt[0] for pt in path.coordinates]),
            max([pt[1] for pt in path.coordinates]))

        trip = Trip(trip_id, route_name, headsign, str(carpool.deeplink), carpool.departureDate, carpool.departureTime, carpool.path, carpool.agency, carpool.lastUpdated, stop_times, carpool.driver, carpool.additional_ridesharing_info, carpool.route_color, carpool.route_text_color, bbox)

        return trip

    def _trip_id(self, carpool):
        """Agency-scoped trip id: "<agency>:<carpool id>"."""
        return f"{carpool.agency}:{carpool.id}"

    def _replace_stops_by_transit_stops(self, carpool, max_search_distance):
        """Replace each carpool stop by the closest known transit stop within
        max_search_distance (delegated to the stops store)."""
        new_stops = []
        for carpool_stop in carpool.stops:
            new_stops.append(self.stops_store.find_closest_stop(carpool_stop, max_search_distance))
        return new_stops

    def enhance_carpool(self, carpool):
        """Route the carpool, simplify the resulting path, and collect
        additional (virtual) stops along it. Returns a copy of the carpool
        with enhanced stops and path. Currently unused (see class NOTE)."""
        if self.REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS:
            carpool.stops = self._replace_stops_by_transit_stops(carpool, self.REPLACEMENT_STOPS_SERACH_RADIUS_IN_M)

        path = self._path_for_ride(carpool)
        # Simplify with a fixed tolerance (not SIMPLIFY_TOLERANCE — see NOTE above).
        lineString_shapely_wgs84 = LineString(coordinates = path["points"]["coordinates"]).simplify(0.0001)
        lineString_wgs84 = GeoJSONLineString(type="LineString", coordinates=list(lineString_shapely_wgs84.coords))
        virtual_stops = self.stops_store.find_additional_stops_around(lineString_wgs84, carpool.stops)
        if not virtual_stops.empty:
            virtual_stops["time"] = self._estimate_times(path, virtual_stops['distance'])
            logger.debug("Virtual stops found: {}".format(virtual_stops))
            if len(virtual_stops) > MAX_STOPS_PER_TRIP:
                # in case we found more than MAX_STOPS_PER_TRIP, we retain first and last
                # half of MAX_STOPS_PER_TRIP
                # NOTE(review): np.r_[0:k, k:] concatenates 0..k with k..end,
                # i.e. it appears to keep ALL rows rather than the first and
                # last half — suspected bug, confirm intent before re-enabling.
                virtual_stops = virtual_stops.iloc[np.r_[0:int(MAX_STOPS_PER_TRIP/2), int(MAX_STOPS_PER_TRIP/2):]]

        trip_id = f"{carpool.agency}:{carpool.id}"
        stop_times = self._stops_and_stop_times(carpool.departureTime, trip_id, virtual_stops)

        enhanced_carpool = carpool.copy()
        enhanced_carpool.stops = stop_times
        enhanced_carpool.path = lineString_wgs84
        return enhanced_carpool

    def _convert_stop_times(self, carpool):
        """Map the carpool's stops to GtfsStopTime records (1-based sequence).
        Pickup/dropoff are encoded as "coordinate with driver" unless the stop
        is dropoff-only / pickup-only respectively."""

        stop_times = [GtfsStopTime(
            self._trip_id(carpool),
            stop.arrivalTime,
            stop.departureTime,
            stop.id,
            seq_nr+1,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_dropoff else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_pickup else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_TIMEPOINT_APPROXIMATE)
            for seq_nr, stop in enumerate(carpool.stops)]
        return stop_times

    def _path_for_ride(self, carpool):
        """Route through all announced stops. NOTE(review): `self.router` is
        never initialized (commented out above) — raises AttributeError."""
        points = self._stop_coords(carpool.stops)
        return self.router.path_for_stops(points)

    def _stop_coords(self, stops):
        # Retrieve coordinates of all officially announced stops (start, intermediate, target)
        return [Point(stop.lon, stop.lat) for stop in stops]

    def _estimate_times(self, path, distances_from_start):
        """Estimate arrival times (in ms along the route) for stops given their
        distance from the route start, by linearly interpolating within the
        routing instructions of `path`."""
        cumulated_distance = 0
        cumulated_time = 0
        stop_times = []
        instructions = path["instructions"]

        cnt = 0
        # NOTE(review): the two locals below are assigned but never used.
        instr_distance = instructions[cnt]["distance"]
        instr_time = instructions[cnt]["time"]

        for distance in distances_from_start:
            # Advance through instructions until the current one spans `distance`.
            while cnt < len(instructions) and cumulated_distance + instructions[cnt]["distance"] < distance:
                cumulated_distance = cumulated_distance + instructions[cnt]["distance"]
                cumulated_time = cumulated_time + instructions[cnt]["time"]
                cnt = cnt + 1

            if cnt < len(instructions):
                if instructions[cnt]["distance"] ==0:
                    raise Exception("Origin and destinaction too close")
                    # raise RoutingException("Origin and destinaction too close")
                # Interpolate linearly within the current instruction segment.
                percent_dist = (distance - cumulated_distance) / instructions[cnt]["distance"]
                stop_time = cumulated_time + percent_dist * instructions[cnt]["time"]
                stop_times.append(stop_time)
            else:
                logger.debug("distance {} exceeds total length {}, using max arrival time {}".format(distance, cumulated_distance, cumulated_time))
                stop_times.append(cumulated_time)
        return stop_times

    def _stops_and_stop_times(self, start_time, trip_id, stops_frame):
        """Convert the virtual-stops frame into StopTime models, skipping
        stops that are too close to their neighbor."""
        # Assumptions:
        # arrival_time = departure_time
        # pickup_type, drop_off_type for origin: = coordinate/none
        # pickup_type, drop_off_type for destination: = none/coordinate
        # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners)
        number_of_stops = len(stops_frame.index)
        total_distance = stops_frame.iloc[number_of_stops-1]["distance"]

        first_stop_time = GtfsTimeDelta(hours = start_time.hour, minutes = start_time.minute, seconds = start_time.second)
        stop_times = []
        seq_nr = 0
        for i in range(0, number_of_stops):
            current_stop = stops_frame.iloc[i]

            if not current_stop.id:
                continue
            elif i == 0:
                if (stops_frame.iloc[1].time-current_stop.time) < 1000:
                    # skip custom stop if there is an official stop very close by
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            else:
                if (current_stop.time-stops_frame.iloc[i-1].time) < 5000 and not i==1 and not is_carpooling_stop(current_stop.id, current_stop.stop_name):
                    # skip latter stop if it's very close (<5 seconds drive) by the preceding
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            # `time` is presumably milliseconds from route start — TODO confirm
            # against _estimate_times.
            trip_time = timedelta(milliseconds=int(current_stop.time))
            is_dropoff = self._is_dropoff_stop(current_stop, total_distance)
            is_pickup = self._is_pickup_stop(current_stop, total_distance)
            # TODO would be nice if possible to publish a minimum shared distance
            pickup_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_pickup else STOP_TIMES_STOP_TYPE_NONE
            dropoff_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_dropoff else STOP_TIMES_STOP_TYPE_NONE

            if is_pickup and not is_dropoff:
                pickup_dropoff = PickupDropoffType.only_pickup
            elif not is_pickup and is_dropoff:
                pickup_dropoff = PickupDropoffType.only_dropoff
            else:
                pickup_dropoff = PickupDropoffType.pickup_and_dropoff

            next_stop_time = first_stop_time + trip_time
            seq_nr += 1
            stop_times.append(StopTime(**{
                'arrivalTime': str(next_stop_time),
                'departureTime': str(next_stop_time),
                'id': current_stop.id,
                'pickup_dropoff': pickup_dropoff,
                'name': str(current_stop.stop_name),
                'lat': current_stop.y,
                'lon': current_stop.x
                }))

        return stop_times

    def _is_dropoff_stop(self, current_stop, total_distance):
        # Stops in the second half of the route are dropoff stops.
        return current_stop["distance"] >= 0.5 * total_distance

    def _is_pickup_stop(self, current_stop, total_distance):
        # Stops in the first half of the route are pickup stops.
        return current_stop["distance"] < 0.5 * total_distance
|
||||
|
||||
def load_carpool(agency_id: str, carpool_id: str, folder: str = 'data/enhanced') -> Carpool:
    """Read <folder>/<agency_id>/<carpool_id>.json and deserialize it into a
    Carpool model. Raises FileNotFoundError if the file is missing, and the
    model's validation error if the JSON does not match the Carpool schema."""
    with open(f'{folder}/{agency_id}/{carpool_id}.json', 'r', encoding='utf-8') as f:
        # Named 'carpool_dict' (not 'dict') to avoid shadowing the builtin.
        carpool_dict = json.load(f)
    return Carpool(**carpool_dict)
|
||||
|
||||
def carpool_exists(agency_id: str, carpool_id: str, folder: str ='data/enhanced'):
    """True if a serialized carpool file exists for this agency/carpool."""
    candidate = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.path.exists(candidate)
|
||||
|
||||
def remove_carpool_file(agency_id: str, carpool_id: str, folder: str ='data/enhanced'):
    """Delete the serialized carpool file; raises FileNotFoundError if absent."""
    file_path = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.remove(file_path)
|
||||
0
amarillo_gtfs_generator/tests/__init__.py
Normal file
0
amarillo_gtfs_generator/tests/__init__.py
Normal file
142
amarillo_gtfs_generator/tests/test_gtfs.py
Normal file
142
amarillo_gtfs_generator/tests/test_gtfs.py
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
from amarillo.tests.sampledata import carpool_1234, data1, carpool_repeating_json, stop_issue
|
||||
from amarillo_gtfs_generator.gtfs_export import GtfsExport
|
||||
from amarillo_gtfs_generator.gtfs import GtfsRtProducer
|
||||
from amarillo_stops.stops import StopsStore
|
||||
from amarillo_gtfs_generator.services.trips import TripStore
|
||||
from amarillo.models.Carpool import Carpool
|
||||
from datetime import datetime
|
||||
import time
|
||||
import pytest
|
||||
|
||||
|
||||
def test_gtfs_generation():
    """Smoke test: a carpool can be loaded into a TripStore and exported as a GTFS feed."""
    carpool = Carpool(**data1)
    stop_store = StopsStore()
    trip_store = TripStore(stop_store)
    trip_store.put_carpool(carpool)

    exporter = GtfsExport(None, None, trip_store, stop_store)
    exporter.export('target/tests/test_gtfs_generation/test.gtfs.zip', "target/tests/test_gtfs_generation")
|
||||
|
||||
def test_correct_stops():
    """Regression test: the problematic fixture must yield exactly one trip."""
    carpool = Carpool(**stop_issue)
    stop_store = StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 250}])
    stop_store.load_stop_sources()
    trip_store = TripStore(stop_store)
    trip_store.put_carpool(carpool)
    assert len(trip_store.trips) == 1
|
||||
|
||||
|
||||
class TestTripConverter:
    """Golden-value tests for GtfsRtProducer's TripUpdate conversion."""

    def setup_method(self, method):
        # Fresh stores per test; stop source is fetched from the network.
        self.stops_store = StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}])
        self.trips_store = TripStore(self.stops_store)

    def test_as_one_time_trip_as_delete_update(self):
        """A one-off trip yields a single CANCELED update for its start date."""
        cp = Carpool(**data1)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))

        converter = GtfsRtProducer(self.trips_store)
        # NOTE: local name `json` shadows any imported json module in this test.
        json = converter._as_delete_updates(trip, datetime(2022,4,11))

        assert json == [{
            'trip': {
                'tripId': 'mfdz:Eins',
                'startTime': '23:59:00',
                'startDate': '20220530',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Eins'
            }
        }]

    def test_as_one_time_trip_as_added_update(self):
        """A one-off trip yields one ADDED update with two stop time updates
        (pickup-only origin, dropoff-only destination)."""
        cp = Carpool(**data1)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))

        converter = GtfsRtProducer(self.trips_store)
        json = converter._as_added_updates(trip, datetime(2022,4,11))
        assert json == [{
            'trip': {
                'tripId': 'mfdz:Eins',
                'startTime': '23:59:00',
                'startDate': '20220530',
                'scheduleRelationship': 'ADDED',
                'routeId': 'mfdz:Eins',
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl' : 'https://mfdz.de/trip/123',
                    'agencyId' : 'mfdz',
                    'route_long_name' : 'abc nach xyz',
                    'route_type': 1551
                }
            },
            'stopTimeUpdate': [{
                'stopSequence': 1,
                'arrival': {
                    'time': time.mktime(datetime(2022,5,30,23,59,0).timetuple()),
                    'uncertainty': 600
                },
                'departure': {
                    'time': time.mktime(datetime(2022,5,30,23,59,0).timetuple()),
                    'uncertainty': 600
                },
                'stopId': 'mfdz:12073:001',
                'scheduleRelationship': 'SCHEDULED',
                'stop_time_properties': {
                    '[transit_realtime.stop_time_properties]': {
                        'dropoffType': 'NONE',
                        'pickupType': 'COORDINATE_WITH_DRIVER'
                    }
                }
            },
            {
                'stopSequence': 2,
                'arrival': {
                    'time': time.mktime(datetime(2022,5,31,0,16,45,0).timetuple()),
                    'uncertainty': 600
                },
                'departure': {
                    'time': time.mktime(datetime(2022,5,31,0,16,45,0).timetuple()),
                    'uncertainty': 600
                },

                'stopId': 'de:12073:900340137::3',
                'scheduleRelationship': 'SCHEDULED',
                'stop_time_properties': {
                    '[transit_realtime.stop_time_properties]': {
                        'dropoffType': 'COORDINATE_WITH_DRIVER',
                        'pickupType': 'NONE'
                    }
                }
            }]
        }]

    def test_as_periodic_trip_as_delete_update(self):
        """A weekly trip yields one CANCELED update per occurrence in the
        14-day window starting at the given date."""
        cp = Carpool(**carpool_repeating_json)
        self.trips_store.put_carpool(cp)
        trip = next(iter(self.trips_store.trips.values()))

        converter = GtfsRtProducer(self.trips_store)
        json = converter._as_delete_updates(trip, datetime(2022,4,11))

        assert json == [{
            'trip': {
                'tripId': 'mfdz:Zwei',
                'startTime': '15:00:00',
                'startDate': '20220411',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Zwei'
            }
        },
        {
            'trip': {
                'tripId': 'mfdz:Zwei',
                'startTime': '15:00:00',
                'startDate': '20220418',
                'scheduleRelationship': 'CANCELED',
                'routeId': 'mfdz:Zwei'
            }
        }
        ]
|
||||
BIN
docs-overview-diagram.png
Normal file
BIN
docs-overview-diagram.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 71 KiB |
22
logging.conf
Normal file
22
logging.conf
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
[loggers]
|
||||
keys=root
|
||||
|
||||
[handlers]
|
||||
keys=consoleHandler
|
||||
|
||||
[formatters]
|
||||
keys=simpleFormatter
|
||||
|
||||
[logger_root]
|
||||
level=INFO
|
||||
handlers=consoleHandler
|
||||
propagate=yes
|
||||
|
||||
[handler_consoleHandler]
|
||||
class=StreamHandler
|
||||
level=DEBUG
|
||||
formatter=simpleFormatter
|
||||
args=(sys.stdout,)
|
||||
|
||||
[formatter_simpleFormatter]
|
||||
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
|
||||
11
pyproject.toml
Normal file
11
pyproject.toml
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
[project]
|
||||
name = "amarillo-gtfs-generator"
|
||||
version = "0.0.2"
|
||||
dependencies = [
|
||||
"amarillo",
|
||||
"schedule",
|
||||
"watchdog",
|
||||
]
|
||||
|
||||
[tool.setuptools.packages]
|
||||
find = {}
|
||||
3
requirements.txt
Normal file
3
requirements.txt
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
amarillo
|
||||
schedule
|
||||
watchdog
|
||||
Loading…
Reference in a new issue