Compare commits

..

No commits in common. "main" and "jenkins-test" have entirely different histories.

74 changed files with 1552 additions and 1521 deletions

55
.gitignore vendored
View file

@ -1,3 +1,4 @@
# ---> Python
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
@ -20,7 +21,6 @@ parts/
sdist/ sdist/
var/ var/
wheels/ wheels/
pip-wheel-metadata/
share/python-wheels/ share/python-wheels/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
@ -50,6 +50,7 @@ coverage.xml
*.py,cover *.py,cover
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
cover/
# Translations # Translations
*.mo *.mo
@ -72,6 +73,7 @@ instance/
docs/_build/ docs/_build/
# PyBuilder # PyBuilder
.pybuilder/
target/ target/
# Jupyter Notebook # Jupyter Notebook
@ -82,7 +84,9 @@ profile_default/
ipython_config.py ipython_config.py
# pyenv # pyenv
.python-version # For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv # pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
@ -91,7 +95,22 @@ ipython_config.py
# install all needed dependencies. # install all needed dependencies.
#Pipfile.lock #Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow # poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/ __pypackages__/
# Celery stuff # Celery stuff
@ -128,11 +147,20 @@ dmypy.json
# Pyre type checker # Pyre type checker
.pyre/ .pyre/
setup.ipynb # pytype static type analyzer
.pytype/
*~ # Cython debug symbols
/.idea/ cython_debug/
/.vscode/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.vscode
secrets secrets
gtfs/*.zip gtfs/*.zip
gtfs/*.pbf gtfs/*.pbf
@ -142,14 +170,5 @@ data/enhanced/
data/failed/ data/failed/
data/trash/ data/trash/
data/gtfs/ data/gtfs/
data/grfs data/tmp/
data/tmp data/agencyconf
data/users/**
data/**
#these files are under app/static but they get copied to the outside directory on startup
logging.conf
config
static/**
templates/**
conf/**

View file

@ -1,43 +0,0 @@
FROM tiangolo/uvicorn-gunicorn:python3.10-slim
LABEL maintainer="info@mfdz.de"
WORKDIR /app
RUN \
apt update \
&& apt install -y \
# GDAL headers are required for fiona, which is required for geopandas.
# Also gcc is used to compile C++ code.
libgdal-dev g++ \
# libspatialindex is required for rtree.
libspatialindex-dev \
# Remove package index obtained by `apt update`.
&& rm -rf /var/lib/apt/lists/*
ENV ADMIN_TOKEN=''
ENV RIDE2GO_TOKEN=''
ENV SECRET_KEY=''
ENV METRICS_USER=''
ENV METRICS_PASSWORD=''
EXPOSE 80
ARG PACKAGE_REGISTRY_URL
ARG PLUGINS
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN --mount=type=secret,id=AMARILLO_REGISTRY_CREDENTIALS \
pip install --no-cache-dir --upgrade --extra-index-url https://$(cat /run/secrets/AMARILLO_REGISTRY_CREDENTIALS)@${PACKAGE_REGISTRY_URL} ${PLUGINS}
COPY ./amarillo /app/amarillo
COPY ./amarillo/plugins /app/amarillo/plugins
COPY ./amarillo/static/static /app/static
COPY ./amarillo/static/templates /app/templates
COPY ./amarillo/static/config /app
COPY ./amarillo/static/logging.conf /app
COPY ./amarillo/static/conf /app/conf
# This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead
# CMD ["uvicorn", "amarillo.main:app", "--host", "0.0.0.0", "--port", "8000"]

76
Jenkinsfile vendored
View file

@ -1,23 +1,19 @@
pipeline { pipeline {
agent any agent {
docker {
image 'python:3'
args '-u root'
}
}
environment { environment {
GITEA_CREDS = credentials('AMARILLO-JENKINS-GITEA-USER') GITEA_CREDS = credentials('AMARILLO-JENKINS-GITEA-USER')
TWINE_REPO_URL = "https://git.gerhardt.io/api/packages/amarillo/pypi" TWINE_REPO_URL = "https://git.gerhardt.io/api/packages/amarillo/pypi"
PLUGINS_REPO_URL = "git.gerhardt.io/api/packages/amarillo/pypi/simple"
DOCKER_REGISTRY_URL = 'https://git.gerhardt.io'
OWNER = 'amarillo'
IMAGE_NAME = 'amarillo'
AMARILLO_DISTRIBUTION = '0.2'
TAG = "${AMARILLO_DISTRIBUTION}.${BUILD_NUMBER}"
PLUGINS = 'amarillo-metrics amarillo-enhancer amarillo-grfs-exporter'
DEPLOY_WEBHOOK_URL = 'http://amarillo.mfdz.de:8888/mitanand'
DEPLOY_SECRET = credentials('AMARILLO-JENKINS-DEPLOY-SECRET')
} }
stages { stages {
stage('Create virtual environment') { stage('Create virtual environment') {
steps { steps {
echo 'Creating virtual environment' echo 'Creating virtual environment'
sh '''python3 -m venv .venv sh '''python -m venv .venv
. .venv/bin/activate''' . .venv/bin/activate'''
} }
@ -25,61 +21,25 @@ pipeline {
stage('Installing requirements') { stage('Installing requirements') {
steps { steps {
echo 'Installing packages' echo 'Installing packages'
sh 'python3 -m pip install -r requirements.txt' sh 'python -m pip install -r requirements.txt'
sh 'python3 -m pip install --upgrade build' sh 'python -m pip install --upgrade build'
sh 'python3 -m pip install --upgrade twine' sh 'python -m pip install --upgrade twine'
} }
} }
stage('Build package') { stage('Test') {
steps {
echo 'Testing'
}
}
stage('Build') {
steps { steps {
echo 'Building package' echo 'Building package'
sh 'python3 -m build' sh 'python -m build'
} }
} }
stage('Publish package') { stage('Publish package') {
steps { steps {
sh 'python3 -m twine upload --skip-existing --verbose --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*' sh 'python -m twine upload --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*'
}
}
stage('Build docker image') {
when {
branch 'main'
}
steps {
echo 'Building image'
script {
docker.build("${OWNER}/${IMAGE_NAME}:${TAG}",
//--no-cache to make sure plugins are updated
"--no-cache --build-arg='PACKAGE_REGISTRY_URL=${PLUGINS_REPO_URL}' --build-arg='PLUGINS=${PLUGINS}' --secret id=AMARILLO_REGISTRY_CREDENTIALS,env=GITEA_CREDS .")
}
}
}
stage('Push image to container registry') {
when {
branch 'main'
}
steps {
echo 'Pushing image to registry'
script {
docker.withRegistry(DOCKER_REGISTRY_URL, 'AMARILLO-JENKINS-GITEA-USER'){
def image = docker.image("${OWNER}/${IMAGE_NAME}:${TAG}")
image.push()
image.push('latest')
}
}
}
}
stage('Notify CD script') {
when {
branch 'main'
}
steps {
echo 'Triggering deploy webhook'
script {
def response = httpRequest contentType: 'APPLICATION_JSON',
httpMode: 'POST', requestBody: '{}', authentication: 'AMARILLO-JENKINS-DEPLOY-SECRET',
url: "${DEPLOY_WEBHOOK_URL}"
}
} }
} }
} }

View file

@ -1,2 +0,0 @@
recursive-include amarillo/static/ *
recursive-include amarillo/tests/ *

View file

@ -1,66 +1,2 @@
# Amarillo # amarillo-core
**CRUD for carpool offers**
An Amarillo is a [yellow-dressed person](https://www.dreamstime.com/sancti-spiritus-cuba-feb-road-spot-amarillos-yellow-ones-who-stop-cars-oblige-driver-to-give-lift-people-waiting-image130186034) helping others to find a car pool in Cuba.
## Setup
- Python 3.9.2 with pip
- python3-venv
Create a virtual environment `python3 -m venv venv`.
Activate the environment and install the dependencies `pip install -r requirements.txt`.
Run `uvicorn amarillo.main:app`.
In development, you can use `--reload`.
## Environment Variables
- `env`
- `ADMIN_TOKEN`
## Security
All endpoints are protected by an API-Key in the HTTP header.
There is a special *admin* user.
For this user, the API-Key must be passed in as an environment variable when
Amarillo is started.
The admin can create additional API-Keys in the `/users` endpoint. This
endpoint is always available but not always shown in `/docs`, especially not
when running in production.
The Swagger docs for `/users` can be seen on the MFDZ demo server.
Permissions work this way
- the admin is allowed to call all operations on all resources. Only the admin
can create new API-Keys by POSTing an `users` JSON object to `/users`.
- API-Keys for agencies are allowed to POST/PUT/GET/DELETE their own
resources and GET some public resources.
## Development
### GTFS-RT python bindings
In case you modify or update the proto-files in amarillo/proto, you'll need to regenerate the python bindings. First, create the python files:
```sh
$ cd amarillo/proto
$ protoc --version
libprotoc 3.21.6
$ protoc --proto_path=. --python_out=../services/gtfsrt gtfs-realtime.proto realtime_extension.proto
$ sed 's/import gtfs_realtime_pb2/import amarillo.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
```
## Testing
In the top directory, run `pytest amarillo/tests`.
## Docker
Based on [tiangolo/uvicorn-gunicorn:python3.9-slim](https://github.com/tiangolo/uvicorn-gunicorn-docker)
- build `docker build -t amarillo --build-arg="PLUGINS=amarillo-metrics" .`
- run `docker run --rm --name amarillo -p 8000:80 -e MAX_WORKERS="1" -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`

View file

@ -1 +0,0 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

View file

@ -1,14 +1,14 @@
# separate file so that it can be imported without initializing FastAPI # separate file so that it can be imported without initializing FastAPI
from amarillo.utils.container import container from amarillo.app.utils.container import container
import logging import logging
from amarillo.services.users import UserService, user_conf_directory from amarillo.app.services.agencyconf import AgencyConfService, agency_conf_directory
from amarillo.services.agencies import AgencyService from amarillo.app.services.agencies import AgencyService
from amarillo.services.regions import RegionService from amarillo.app.services.regions import RegionService
from amarillo.services.config import config from amarillo.app.services.config import config
from amarillo.utils.utils import assert_folder_exists from amarillo.app.utils.utils import assert_folder_exists
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -27,12 +27,12 @@ def create_required_directories():
assert_folder_exists(f'data/{subdir}/{agency_id}') assert_folder_exists(f'data/{subdir}/{agency_id}')
# Agency configurations # Agency configurations
assert_folder_exists(user_conf_directory) assert_folder_exists(agency_conf_directory)
def configure_services(): def configure_services():
container['users'] = UserService() container['agencyconf'] = AgencyConfService()
logger.info("Loaded %d user configuration(s)", len(container['users'].user_id_to_user_conf)) logger.info("Loaded %d agency configuration(s)", len(container['agencyconf'].agency_id_to_agency_conf))
container['agencies'] = AgencyService() container['agencies'] = AgencyService()
logger.info("Loaded %d agencies", len(container['agencies'].agencies)) logger.info("Loaded %d agencies", len(container['agencies'].agencies))

View file

@ -1,26 +1,20 @@
import logging.config import logging.config
import importlib
import pkgutil from amarillo.app.configuration import configure_services, configure_admin_token
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main")
import uvicorn import uvicorn
import mimetypes import mimetypes
from starlette.staticfiles import StaticFiles from starlette.staticfiles import StaticFiles
from amarillo.utils.utils import copy_static_files
#this has to run before app.configuration is imported, otherwise we get validation error for config because the config file is not copied yet
copy_static_files(["conf", "static", "templates", "logging.conf", "config"])
import amarillo.plugins from amarillo.app.routers import carpool, agency, agencyconf, region
from amarillo.configuration import configure_services, configure_admin_token
from amarillo.routers import carpool, agency, region, users
import amarillo.services.oauth2 as oauth2
from fastapi import FastAPI from fastapi import FastAPI
# https://pydantic-docs.helpmanual.io/usage/settings/ # https://pydantic-docs.helpmanual.io/usage/settings/
from amarillo.views import home # from amarillo.app.views import home
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main")
logger.info("Hello Amarillo!") logger.info("Hello Amarillo!")
@ -55,10 +49,6 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
}, },
}], }],
servers=[ servers=[
{
"description": "MobiData BW Amarillo service",
"url": "https://amarillo.mobidata-bw.de"
},
{ {
"description": "DABB bbnavi Amarillo service", "description": "DABB bbnavi Amarillo service",
"url": "https://amarillo.bbnavi.de" "url": "https://amarillo.bbnavi.de"
@ -71,10 +61,6 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
"description": "Dev server for development", "description": "Dev server for development",
"url": "https://amarillo-dev.mfdz.de" "url": "https://amarillo-dev.mfdz.de"
}, },
{
"description": "Server for Mitanand project",
"url": "https://mitanand.mfdz.de"
},
{ {
"description": "Localhost for development", "description": "Localhost for development",
"url": "http://localhost:8000" "url": "http://localhost:8000"
@ -85,9 +71,15 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
app.include_router(carpool.router) app.include_router(carpool.router)
app.include_router(agency.router) app.include_router(agency.router)
app.include_router(users.router) app.include_router(agencyconf.router)
app.include_router(region.router) app.include_router(region.router)
app.include_router(oauth2.router)
import importlib
import pkgutil
import amarillo.plugins
def iter_namespace(ns_pkg): def iter_namespace(ns_pkg):
@ -100,27 +92,27 @@ def load_plugins():
for finder, name, ispkg for finder, name, ispkg
in iter_namespace(amarillo.plugins) in iter_namespace(amarillo.plugins)
} }
logger.info(f"Discovered plugins: {list(discovered_plugins.keys())}") print(f"Discovered plugins: {list(discovered_plugins.keys())}")
for name, module in discovered_plugins.items(): for name, module in discovered_plugins.items():
if hasattr(module, "setup"): if hasattr(module, "setup"):
logger.info(f"Running setup function for {name}") print(f"Running setup function for {name}")
module.setup(app) module.setup(app)
else: logger.info(f"Did not find setup function for {name}") else: print(f"Did not find setup function for {name}")
def configure(): def configure():
configure_admin_token() configure_admin_token()
configure_services() configure_services()
configure_routing() # configure_routing()
load_plugins() load_plugins()
def configure_routing(): # def configure_routing():
mimetypes.add_type('application/x-protobuf', '.pbf') # mimetypes.add_type('application/x-protobuf', '.pbf')
app.mount('/static', StaticFiles(directory='static'), name='static') # app.mount('/static', StaticFiles(directory='static'), name='static')
app.mount('/gtfs', StaticFiles(directory='data/gtfs'), name='gtfs') # app.mount('/gtfs', StaticFiles(directory='data/gtfs'), name='gtfs')
app.include_router(home.router) # app.include_router(home.router)
if __name__ == "__main__": if __name__ == "__main__":
@ -128,3 +120,4 @@ if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000) uvicorn.run(app, host="0.0.0.0", port=8000)
else: else:
configure() configure()
pass

View file

@ -0,0 +1,26 @@
from pydantic import ConfigDict, BaseModel, Field
class AgencyConf(BaseModel):
agency_id: str = Field(
description="ID of the agency that uses this token.",
min_length=1,
max_length=20,
pattern='^[a-zA-Z0-9]+$',
examples=["mfdz"])
api_key: str = Field(
description="The agency's API key for using the API",
min_length=20,
max_length=256,
pattern=r'^[a-zA-Z0-9]+$',
examples=["d8yLuY4DqMEUCLcfJASi"])
model_config = ConfigDict(json_schema_extra={
"title": "Agency Configuration",
"description": "Configuration for an agency.",
"example":
{
"agency_id": "mfdz",
"api_key": "d8yLuY4DqMEUCLcfJASi"
}
})

View file

@ -4,7 +4,7 @@ from typing import List, Union, Set, Optional, Tuple
from datetime import time from datetime import time
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from geojson_pydantic.geometries import LineString from geojson_pydantic.geometries import LineString
from enum import Enum, IntEnum from enum import Enum
NumType = Union[float, int] NumType = Union[float, int]
@ -24,15 +24,6 @@ class PickupDropoffType(str, Enum):
only_pickup = "only_pickup" only_pickup = "only_pickup"
only_dropoff = "only_dropoff" only_dropoff = "only_dropoff"
class YesNoEnum(IntEnum):
yes = 1
no = 2
class LuggageSize(IntEnum):
small = 1
medium = 2
large = 3
class StopTime(BaseModel): class StopTime(BaseModel):
id: Optional[str] = Field( id: Optional[str] = Field(
None, None,
@ -119,83 +110,7 @@ class Region(BaseModel):
bbox: Tuple[NumType, NumType, NumType, NumType] = Field( bbox: Tuple[NumType, NumType, NumType, NumType] = Field(
description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]", description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]",
examples=[[10.5,49.2,11.3,51.3]]) examples=[[10.5,49.2,11.3,51.3]])
class RidesharingInfo(BaseModel):
number_free_seats: int = Field(
description="Number of free seats",
ge=0,
examples=[3])
same_gender: Optional[YesNoEnum] = Field(
None,
description="Trip only for same gender:"
"1: Yes"
"2: No",
examples=[1])
luggage_size: Optional[LuggageSize] = Field(
None,
description="Size of the luggage:"
"1: small"
"2: medium"
"3: large",
examples=[3])
animal_car: Optional[YesNoEnum] = Field(
None,
description="Animals in Car allowed:"
"1: Yes"
"2: No",
examples=[2])
car_model: Optional[str] = Field(
None,
description="Car model",
min_length=1,
max_length=48,
examples=["Golf"])
car_brand: Optional[str] = Field(
None,
description="Car brand",
min_length=1,
max_length=48,
examples=["VW"])
creation_date: datetime = Field(
description="Date when trip was created",
examples=["2022-02-13T20:20:39+00:00"])
smoking: Optional[YesNoEnum] = Field(
None,
description="Smoking allowed:"
"1: Yes"
"2: No",
examples=[2])
payment_method: Optional[str] = Field(
None,
description="Method of payment",
min_length=1,
max_length=48)
class Driver(BaseModel):
driver_id: Optional[str] = Field(
None,
description="Identifies the driver.",
min_length=1,
max_length=256,
pattern='^[a-zA-Z0-9_-]+$',
examples=["789"])
profile_picture: Optional[HttpUrl] = Field(
None,
description="URL that contains the profile picture",
examples=["https://mfdz.de/driver/789/picture"])
rating: Optional[int] = Field(
None,
description="Rating of the driver from 1 to 5."
"0 no rating yet",
ge=0,
le=5,
examples=[5])
class Agency(BaseModel): class Agency(BaseModel):
id: str = Field( id: str = Field(
description="ID of the agency.", description="ID of the agency.",
@ -280,17 +195,6 @@ class Carpool(BaseModel):
max_length=20, max_length=20,
pattern='^[a-zA-Z0-9]+$', pattern='^[a-zA-Z0-9]+$',
examples=["mfdz"]) examples=["mfdz"])
driver: Optional[Driver] = Field(
None,
description="Driver data",
examples=["""
{
"driver_id": "123",
"profile_picture": "https://mfdz.de/driver/789/picture",
"rating": 5
}
"""])
deeplink: HttpUrl = Field( deeplink: HttpUrl = Field(
description="Link to an information page providing detail information " description="Link to an information page providing detail information "
@ -341,22 +245,7 @@ class Carpool(BaseModel):
"published.", "published.",
examples=['A single date 2022-04-04 or a list of weekdays ["saturday", ' examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
'"sunday"]']) '"sunday"]'])
route_color: Optional[str] = Field(
None,
pattern='^([0-9A-Fa-f]{6})$',
description="Route color designation that matches public facing material. "
"The color difference between route_color and route_text_color "
"should provide sufficient contrast when viewed on a black and "
"white screen.",
examples=["0039A6"])
route_text_color: Optional[str] = Field(
None,
pattern='^([0-9A-Fa-f]{6})$',
description="Legible color to use for text drawn against a background of "
"route_color. The color difference between route_color and "
"route_text_color should provide sufficient contrast when "
"viewed on a black and white screen.",
examples=["D4D2D2"])
path: Optional[LineString] = Field( path: Optional[LineString] = Field(
None, description="Optional route geometry as json LineString.") None, description="Optional route geometry as json LineString.")
@ -368,18 +257,6 @@ class Carpool(BaseModel):
"purge outdated offers (e.g. older than 180 days). If not " "purge outdated offers (e.g. older than 180 days). If not "
"passed, the service may assume 'now'", "passed, the service may assume 'now'",
examples=["2022-02-13T20:20:39+00:00"]) examples=["2022-02-13T20:20:39+00:00"])
additional_ridesharing_info: Optional[RidesharingInfo] = Field(
None,
description="Extension of GRFS to the GTFS standard",
examples=["""
{
"number_free_seats": 2,
"creation_date": "2022-02-13T20:20:39+00:00",
"same_gender": 2,
"smoking": 1,
"luggage_size": 3
}
"""])
model_config = ConfigDict(json_schema_extra={ model_config = ConfigDict(json_schema_extra={
"title": "Carpool", "title": "Carpool",
# description ... # description ...

View file

@ -0,0 +1,29 @@
from collections import namedtuple
from datetime import timedelta
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name')
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed')
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
# TODO Move to utils
class GtfsTimeDelta(timedelta):
def __str__(self):
seconds = self.total_seconds()
hours = seconds // 3600
minutes = (seconds % 3600) // 60
seconds = seconds % 60
str = '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
return (str)
def __add__(self, other):
if isinstance(other, timedelta):
return self.__class__(self.days + other.days,
self.seconds + other.seconds,
self.microseconds + other.microseconds)
return NotImplemented

View file

@ -4,14 +4,13 @@ from typing import List
from fastapi import APIRouter, HTTPException, status, Depends from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Carpool, Agency from amarillo.app.models.Carpool import Carpool, Agency
from amarillo.models.User import User from amarillo.app.routers.agencyconf import verify_api_key, verify_admin_api_key, verify_permission_for_same_agency_or_admin
from amarillo.services.oauth2 import get_current_user, verify_permission
# TODO should move this to service # TODO should move this to service
from amarillo.routers.carpool import store_carpool, delete_agency_carpools_older_than from amarillo.app.routers.carpool import store_carpool, delete_agency_carpools_older_than
from amarillo.services.agencies import AgencyService from amarillo.app.services.agencies import AgencyService
from amarillo.services.importing.ride2go import import_ride2go from amarillo.app.services.importing.ride2go import import_ride2go
from amarillo.utils.container import container from amarillo.app.utils.container import container
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -33,7 +32,7 @@ router = APIRouter(
status.HTTP_404_NOT_FOUND: {"description": "Agency not found"}, status.HTTP_404_NOT_FOUND: {"description": "Agency not found"},
}, },
) )
async def get_agency(agency_id: str, requesting_user: User = Depends(get_current_user)) -> Agency: async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key)) -> Agency:
agencies: AgencyService = container['agencies'] agencies: AgencyService = container['agencies']
agency = agencies.get_agency(agency_id) agency = agencies.get_agency(agency_id)
agency_exists = agency is not None agency_exists = agency is not None
@ -53,7 +52,6 @@ async def get_agency(agency_id: str, requesting_user: User = Depends(get_current
operation_id="sync", operation_id="sync",
summary="Synchronizes all carpool offers", summary="Synchronizes all carpool offers",
response_model=List[Carpool], response_model=List[Carpool],
response_model_exclude_none=True,
responses={ responses={
status.HTTP_200_OK: { status.HTTP_200_OK: {
"description": "Carpool created"}, "description": "Carpool created"},
@ -62,8 +60,8 @@ async def get_agency(agency_id: str, requesting_user: User = Depends(get_current
status.HTTP_500_INTERNAL_SERVER_ERROR: { status.HTTP_500_INTERNAL_SERVER_ERROR: {
"description": "Import error"} "description": "Import error"}
}) })
async def sync(agency_id: str, requesting_user: User = Depends(get_current_user)) -> List[Carpool]: async def sync(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)) -> List[Carpool]:
verify_permission(f"{agency_id}:sync") await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
if agency_id == "ride2go": if agency_id == "ride2go":
import_function = import_ride2go import_function = import_ride2go

View file

@ -0,0 +1,103 @@
import logging
from typing import List
from fastapi import APIRouter, HTTPException, status, Header, Depends
from amarillo.app.models.AgencyConf import AgencyConf
from amarillo.app.services.agencyconf import AgencyConfService
from amarillo.app.services.config import config
from amarillo.app.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/agencyconf",
tags=["agencyconf"]
)
# This endpoint is not shown in PROD installations, only in development
# TODO make this an explicit config option
include_in_schema = config.env != 'PROD'
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_admin_api_key(X_API_Key: str = Header(...)):
if X_API_Key != config.admin_token:
message="X-API-Key header invalid"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
return "admin"
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_api_key(X_API_Key: str = Header(...)):
agency_conf_service: AgencyConfService = container['agencyconf']
return agency_conf_service.check_api_key(X_API_Key)
# TODO Return code 403 Unauthoized (in response_status_codes as well...)
async def verify_permission_for_same_agency_or_admin(agency_id_in_path_or_body, agency_id_from_api_key):
"""Verifies that an agency is accessing something it owns or the user is admin
The agency_id is part of some paths, or when not in the path it is in the body, e.g. in PUT /carpool.
This function encapsulates the formula 'working with own stuff, or admin'.
"""
is_permitted = agency_id_in_path_or_body == agency_id_from_api_key or agency_id_from_api_key == "admin"
if not is_permitted:
message = f"Working with {agency_id_in_path_or_body} resources is not permitted for {agency_id_from_api_key}."
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
@router.get("/",
include_in_schema=include_in_schema,
operation_id="getAgencyIdsWhichHaveAConfiguration",
summary="Get agency_ids which have a configuration",
response_model=List[str],
description="Returns the agency_ids but not the details.",
status_code=status.HTTP_200_OK)
async def get_agency_ids(admin_api_key: str = Depends(verify_api_key)) -> [str]:
return container['agencyconf'].get_agency_ids()
@router.post("/",
include_in_schema=include_in_schema,
operation_id="postNewAgencyConf",
summary="Post a new AgencyConf")
async def post_agency_conf(agency_conf: AgencyConf, admin_api_key: str = Depends(verify_admin_api_key)):
agency_conf_service: AgencyConfService = container['agencyconf']
agency_conf_service.add(agency_conf)
# TODO 400->403
@router.delete("/{agency_id}",
include_in_schema=include_in_schema,
operation_id="deleteAgencyConf",
status_code=status.HTTP_200_OK,
summary="Delete configuration of an agency. Returns true if the token for the agency existed, "
"false if it didn't exist."
)
async def delete_agency_conf(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)):
agency_may_delete_own = requesting_agency_id == agency_id
admin_may_delete_everything = requesting_agency_id == "admin"
is_permitted = agency_may_delete_own or admin_may_delete_everything
if not is_permitted:
message = f"The API key for {requesting_agency_id} can not delete the configuration for {agency_id}"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
agency_conf_service: AgencyConfService = container['agencyconf']
agency_exists = agency_id in agency_conf_service.get_agency_ids()
if not agency_exists:
message = f"No config for {agency_id}"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
agency_conf_service.delete(agency_id)

View file

@ -5,13 +5,12 @@ import os.path
import re import re
from glob import glob from glob import glob
from fastapi import APIRouter, Body, HTTPException, status, Depends from fastapi import APIRouter, Body, Header, HTTPException, status, Depends
from datetime import datetime from datetime import datetime
from amarillo.models.Carpool import Carpool from amarillo.app.models.Carpool import Carpool
from amarillo.models.User import User from amarillo.app.routers.agencyconf import verify_api_key, verify_permission_for_same_agency_or_admin
from amarillo.services.oauth2 import get_current_user, verify_permission from amarillo.app.tests.sampledata import examples
from amarillo.tests.sampledata import examples
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -26,20 +25,21 @@ router = APIRouter(
summary="Add a new or update existing carpool", summary="Add a new or update existing carpool",
description="Carpool object to be created or updated", description="Carpool object to be created or updated",
response_model=Carpool, response_model=Carpool,
response_model_exclude_none=True,
responses={ responses={
status.HTTP_404_NOT_FOUND: { status.HTTP_404_NOT_FOUND: {
"description": "Agency does not exist"}, "description": "Agency does not exist"},
}) })
async def post_carpool(carpool: Carpool = Body(..., examples=examples), async def post_carpool(carpool: Carpool = Body(..., examples=examples),
requesting_user: User = Depends(get_current_user)) -> Carpool: requesting_agency_id: str = Depends(verify_api_key)) -> Carpool:
verify_permission(f"{carpool.agency}:write", requesting_user) await verify_permission_for_same_agency_or_admin(carpool.agency, requesting_agency_id)
logger.info(f"POST trip {carpool.agency}:{carpool.id}.") logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
await assert_agency_exists(carpool.agency) await assert_agency_exists(carpool.agency)
await store_carpool(carpool) await set_lastUpdated_if_unset(carpool)
await save_carpool(carpool)
return carpool return carpool
@ -48,15 +48,12 @@ async def post_carpool(carpool: Carpool = Body(..., examples=examples),
operation_id="getcarpoolById", operation_id="getcarpoolById",
summary="Find carpool by ID", summary="Find carpool by ID",
response_model=Carpool, response_model=Carpool,
response_model_exclude_none=True,
description="Find carpool by ID", description="Find carpool by ID",
responses={ responses={
status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"}, status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"},
}, },
) )
async def get_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)) -> Carpool: async def get_carpool(agency_id: str, carpool_id: str, api_key: str = Depends(verify_api_key)) -> Carpool:
verify_permission(f"{agency_id}:read", requesting_user)
logger.info(f"Get trip {agency_id}:{carpool_id}.") logger.info(f"Get trip {agency_id}:{carpool_id}.")
await assert_agency_exists(agency_id) await assert_agency_exists(agency_id)
await assert_carpool_exists(agency_id, carpool_id) await assert_carpool_exists(agency_id, carpool_id)
@ -75,8 +72,8 @@ async def get_carpool(agency_id: str, carpool_id: str, requesting_user: User = D
"description": "Carpool or agency not found"}, "description": "Carpool or agency not found"},
}, },
) )
async def delete_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)): async def delete_carpool(agency_id: str, carpool_id: str, requesting_agency_id: str = Depends(verify_api_key)):
verify_permission(f"{agency_id}:write", requesting_user) await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
logger.info(f"Delete trip {agency_id}:{carpool_id}.") logger.info(f"Delete trip {agency_id}:{carpool_id}.")
await assert_agency_exists(agency_id) await assert_agency_exists(agency_id)
@ -93,30 +90,10 @@ async def _delete_carpool(agency_id: str, carpool_id: str):
logger.info(f"Saved carpool {agency_id}:{carpool_id} in trash.") logger.info(f"Saved carpool {agency_id}:{carpool_id} in trash.")
os.remove(f"data/carpool/{agency_id}/{carpool_id}.json") os.remove(f"data/carpool/{agency_id}/{carpool_id}.json")
try:
from amarillo.plugins.metrics import trips_deleted_counter
trips_deleted_counter.inc()
except ImportError:
pass
async def store_carpool(carpool: Carpool) -> Carpool: async def store_carpool(carpool: Carpool) -> Carpool:
carpool_exists = os.path.exists(f"data/carpool/{carpool.agency}/{carpool.id}.json")
await set_lastUpdated_if_unset(carpool) await set_lastUpdated_if_unset(carpool)
await save_carpool(carpool) await save_carpool(carpool)
try:
from amarillo.plugins.metrics import trips_created_counter, trips_updated_counter
if(carpool_exists):
# logger.info("Incrementing trips updated")
trips_updated_counter.inc()
else:
# logger.info("Incrementing trips created")
trips_created_counter.inc()
except ImportError:
pass
return carpool return carpool
async def set_lastUpdated_if_unset(carpool): async def set_lastUpdated_if_unset(carpool):
@ -137,7 +114,7 @@ async def save_carpool(carpool, folder: str = 'data/carpool'):
async def assert_agency_exists(agency_id: str): async def assert_agency_exists(agency_id: str):
agency_exists = os.path.exists(f"data/agency/{agency_id}.json") agency_exists = os.path.exists(f"conf/agency/{agency_id}.json")
if not agency_exists: if not agency_exists:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,

View file

@ -0,0 +1,88 @@
import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.app.models.Carpool import Region
from amarillo.app.routers.agencyconf import verify_admin_api_key
from amarillo.app.services.regions import RegionService
from amarillo.app.utils.container import container
from fastapi.responses import FileResponse
logger = logging.getLogger(__name__)
# All region endpoints in this module are mounted under the /region prefix
# and grouped under the "region" tag in the OpenAPI docs.
router = APIRouter(
    prefix="/region",
    tags=["region"]
)
@router.get("/",
    operation_id="getRegions",
    summary="Return all regions",
    response_model=List[Region],
    responses={
    },
    )
async def get_regions() -> List[Region]:
    """Return every region currently known to the RegionService."""
    region_service: RegionService = container['regions']
    return [region for region in region_service.regions.values()]
@router.get("/{region_id}",
    operation_id="getRegionById",
    summary="Find region by ID",
    response_model=Region,
    description="Find region by ID",
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
    },
    )
async def get_region(region_id: str) -> Region:
    """Look up a single region; responds 404 (via _assert_region_exists) when unknown."""
    found_region = _assert_region_exists(region_id)
    logger.info(f"Get region {region_id}.")
    return found_region
def _assert_region_exists(region_id: str) -> Region:
    """Return the region for region_id, or raise an HTTP 404 if it is unknown."""
    region_service: RegionService = container['regions']
    found_region = region_service.get_region(region_id)
    if found_region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return found_region
@router.get("/{region_id}/gtfs",
    summary="Return GTFS Feed for this region",
    response_description="GTFS-Feed (zip-file)",
    response_class=FileResponse,
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
    }
    )
async def get_file(region_id: str, user: str = Depends(verify_admin_api_key)):
    """Serve the pre-generated GTFS zip for a region (admin key required)."""
    _assert_region_exists(region_id)
    gtfs_path = f'data/gtfs/amarillo.{region_id}.gtfs.zip'
    return FileResponse(gtfs_path)
@router.get("/{region_id}/gtfs-rt",
    summary="Return GTFS-RT Feed for this region",
    response_description="GTFS-RT-Feed",
    response_class=FileResponse,
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
        status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
    }
    )
async def get_gtfs_rt(region_id: str, format: str = 'protobuf', user: str = Depends(verify_admin_api_key)):
    """Serve the pre-generated GTFS-RT feed for a region (admin key required).

    format: 'protobuf' (default) serves the .pbf file, 'json' the .json file;
    any other value responds 400.

    Renamed from `get_file`: the previous definition rebound the module-level
    name of the /{region_id}/gtfs endpoint above (both routes still worked,
    since FastAPI registers at decoration time, but the shadowing broke
    introspection and direct calls in tests).
    """
    _assert_region_exists(region_id)
    if format == 'json':
        return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
    elif format == 'protobuf':
        return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
    else:
        message = "Specified format is not supported, i.e. neither protobuf nor json."
        # Log before raising, consistent with the other error paths in this module.
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

View file

@ -2,7 +2,7 @@ import json
from glob import glob from glob import glob
from typing import Dict from typing import Dict
from amarillo.models.Carpool import Agency from amarillo.app.models.Carpool import Agency
# TODO FG HB this service should also listen to pyinotify # TODO FG HB this service should also listen to pyinotify
# because the (updated) agencies are needed in the enhancer # because the (updated) agencies are needed in the enhancer
@ -12,7 +12,8 @@ class AgencyService:
def __init__(self): def __init__(self):
self.agencies: Dict[str, Agency] = {} self.agencies: Dict[str, Agency] = {}
for agency_file_name in glob('data/agency/*.json'):
for agency_file_name in glob('conf/agency/*.json'):
with open(agency_file_name) as agency_file: with open(agency_file_name) as agency_file:
dict = json.load(agency_file) dict = json.load(agency_file)
agency = Agency(**dict) agency = Agency(**dict)

View file

@ -0,0 +1,111 @@
import json
import os
from glob import glob
from typing import Dict, List
import logging
from fastapi import HTTPException, status
from amarillo.app.models.AgencyConf import AgencyConf
from amarillo.app.services.config import config
logger = logging.getLogger(__name__)
# Directory holding one JSON file per agency with its AgencyConf (agency_id + api key).
agency_conf_directory = 'data/agencyconf'
class AgencyConfService:
    """Persists and looks up per-agency API-key configurations (AgencyConf).

    Two dicts are always kept in sync: agency_id_to_agency_conf holds the
    configurations, and api_key_to_agency_id is a reverse index enabling the
    fast api-key lookup that happens on *every* request.
    """

    def __init__(self):
        self.agency_id_to_agency_conf: Dict[str, AgencyConf] = {}
        self.api_key_to_agency_id: Dict[str, str] = {}

        # Load all previously persisted agency configurations from disk.
        for agency_conf_file_name in glob(f'{agency_conf_directory}/*.json'):
            with open(agency_conf_file_name) as agency_conf_file:
                dictionary = json.load(agency_conf_file)
                agency_conf = AgencyConf(**dictionary)
                self.agency_id_to_agency_conf[agency_conf.agency_id] = agency_conf
                self.api_key_to_agency_id[agency_conf.api_key] = agency_conf.agency_id

    def get_agency_conf(self, agency_id: str) -> AgencyConf:
        """Return the AgencyConf for agency_id, or None if unknown."""
        return self.agency_id_to_agency_conf.get(agency_id)

    def check_api_key(self, api_key: str) -> str:
        """Check if the API key is valid.

        The agencies' api keys are checked first, then the admin's token.
        Returns the agency_id (or "admin") so the caller can perform further
        permission checks. Raises HTTP 400 for an unknown key.
        """
        agency_id = self.api_key_to_agency_id.get(api_key)
        if agency_id is not None:
            return agency_id
        if api_key == config.admin_token:
            return "admin"
        message = "X-API-Key header invalid"
        logger.error(message)
        raise HTTPException(status_code=400, detail=message)

    def add(self, agency_conf: AgencyConf):
        """Persist a new agency configuration in memory and on disk.

        Raises HTTP 400 if the agency exists already, or if the api key is
        already in use by a different agency.
        """
        agency_id = agency_conf.agency_id
        api_key = agency_conf.api_key

        if self.agency_id_to_agency_conf.get(agency_id) is not None:
            message = f"Agency {agency_id} exists already. To update, delete it first."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        agency_using_this_api_key_already = self.api_key_to_agency_id.get(api_key)
        if agency_using_this_api_key_already is not None and agency_using_this_api_key_already != agency_id:
            message = f"Duplicate API Key for {agency_id} not permitted. Use a different key."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        with open(f'{agency_conf_directory}/{agency_id}.json', 'w', encoding='utf-8') as f:
            f.write(agency_conf.json())

        self.agency_id_to_agency_conf[agency_id] = agency_conf
        self.api_key_to_agency_id[api_key] = agency_id
        logger.info(f"Added configuration for agency {agency_id}.")

    def get_agency_ids(self) -> List[str]:
        """Return the ids of all configured agencies."""
        return list(self.agency_id_to_agency_conf.keys())

    def delete(self, agency_id):
        """Remove an agency's configuration from memory and disk.

        Robustness fix: an unknown agency_id previously crashed with
        AttributeError (None.api_key) and surfaced as HTTP 500; it now raises
        an explicit HTTP 404.
        """
        agency_conf = self.agency_id_to_agency_conf.get(agency_id)
        if agency_conf is None:
            message = f"No config for {agency_id}"
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
        del self.api_key_to_agency_id[agency_conf.api_key]
        del self.agency_id_to_agency_conf[agency_id]
        os.remove(f'{agency_conf_directory}/{agency_id}.json')
        logger.info(f"Deleted configuration for agency {agency_id}.")

View file

@ -2,9 +2,9 @@ import json
import logging import logging
from datetime import datetime from datetime import datetime
from typing import Dict from typing import Dict
from amarillo.models.Carpool import Carpool from amarillo.app.models.Carpool import Carpool
from amarillo.services.trips import TripStore from amarillo.app.services.trips import TripStore
from amarillo.utils.utils import yesterday, is_older_than_days from amarillo.app.utils.utils import yesterday, is_older_than_days
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -37,11 +37,6 @@ class CarpoolService():
if cp and self.is_outdated(cp): if cp and self.is_outdated(cp):
logger.info("Purge outdated offer %s", key) logger.info("Purge outdated offer %s", key)
self.delete(cp.agency, cp.id) self.delete(cp.agency, cp.id)
try:
from amarillo.plugins.metrics import trips_deleted_counter
trips_deleted_counter.inc()
except ImportError:
pass
def get(self, agency_id: str, carpool_id: str): def get(self, agency_id: str, carpool_id: str):
return self.carpools.get(f"{agency_id}:{carpool_id}") return self.carpools.get(f"{agency_id}:{carpool_id}")

View file

@ -6,7 +6,6 @@ class Config(BaseSettings):
admin_token: str admin_token: str
ride2go_query_data: str ride2go_query_data: str
env: str = 'DEV' env: str = 'DEV'
graphhopper_base_url: str = 'https://api.mfdz.de/gh'
stop_sources_file: str = 'data/stop_sources.json'
config = Config(_env_file='config', _env_file_encoding='utf-8') config = Config(_env_file='config', _env_file_encoding='utf-8')

View file

@ -0,0 +1,137 @@
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2
import amarillo.app.services.gtfsrt.realtime_extension_pb2 as mfdzrte
from amarillo.app.services.gtfs_constants import *
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict
from datetime import datetime, timedelta
import json
import re
import time
class GtfsRtProducer():
    """Produces GTFS-RT TripUpdate feeds from the trip_store's recently
    added and recently deleted trips."""
    def __init__(self, trip_store):
        # trip_store must provide recently_added_trips() / recently_deleted_trips()
        self.trip_store = trip_store
    def generate_feed(self, time, format='protobuf', bbox=None):
        """Build a GTFS-RT FeedMessage covering recent trip changes.

        time: POSIX timestamp for the feed header (truncated to int).
        format: 'message' returns the protobuf object, 'json' a dict,
            anything else the serialized protobuf bytes.
        bbox: optional bounding box filter passed down to _get_trip_updates.
        NOTE(review): the parameter `time` shadows the module-level
        `import time` (not used in this method) — rename with care.
        """
        # See https://developers.google.com/transit/gtfs-realtime/reference
        # https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
        gtfsrt_dict = {
            'header': {
                'gtfsRealtimeVersion': '1.0',
                'timestamp': int(time)
            },
            'entity': self._get_trip_updates(bbox)
        }
        feed = gtfs_realtime_pb2.FeedMessage()
        # ParseDict validates gtfsrt_dict against the FeedMessage schema.
        ParseDict(gtfsrt_dict, feed)
        if "message" == format.lower():
            return feed
        elif "json" == format.lower():
            return MessageToDict(feed)
        else:
            return feed.SerializeToString()
    def export_feed(self, timestamp, file_path, bbox=None):
        """
        Exports gtfs-rt feed as .json and .pbf file to file_path
        (the two extensions are appended to file_path).
        """
        feed = self.generate_feed(timestamp, "message", bbox)
        with open(f"{file_path}.pbf", "wb") as f:
            f.write(feed.SerializeToString())
        with open(f"{file_path}.json", "w") as f:
            json.dump(MessageToDict(feed), f)
    def _get_trip_updates(self, bbox = None):
        """Collect added and deleted trip updates and wrap each in a
        FeedEntity dict with a sequential id."""
        trips = []
        trips.extend(self._get_added(bbox))
        trips.extend(self._get_deleted(bbox))
        trip_updates = []
        for num, trip in enumerate(trips):
            trip_updates.append( {
                'id': f'carpool-update-{num}',
                'tripUpdate': trip
                }
            )
        return trip_updates
    def _get_deleted(self, bbox = None):
        """TripUpdates (CANCELED) for recently deleted trips."""
        return self._get_updates(
            self.trip_store.recently_deleted_trips(),
            self._as_delete_updates,
            bbox)
    def _get_added(self, bbox = None):
        """TripUpdates (ADDED) for recently added trips."""
        return self._get_updates(
            self.trip_store.recently_added_trips(),
            self._as_added_updates,
            bbox)
    def _get_updates(self, trips, update_func, bbox = None):
        """Apply update_func(trip, today) to every trip, skipping trips
        outside bbox when a bbox is given."""
        updates = []
        today = datetime.today()
        for t in trips:
            if bbox == None or t.intersects(bbox):
                updates.extend(update_func(t, today))
        return updates
    def _as_delete_updates(self, trip, fromdate):
        """One CANCELED TripUpdate dict per upcoming service date of trip."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'CANCELED',
                'routeId': trip.trip_id
            }
        } for trip_date in trip.next_trip_dates(fromdate)]
    def _to_seconds(self, fromdate, stop_time):
        """Convert an HH:MM:SS offset on fromdate (YYYYMMDD string) to a
        POSIX timestamp. The regex tolerates hours >= 24 (GTFS-style times
        past midnight) since the offset is applied as a timedelta."""
        startdate = datetime.strptime(fromdate, '%Y%m%d')
        m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
        delta = timedelta(
            hours=int(m.group(1)),
            minutes=int(m.group(2)),
            seconds=int(m.group(3)))
        # mktime interprets the result in local time — TODO confirm intended.
        return time.mktime((startdate + delta).timetuple())
    def _to_stop_times(self, trip, fromdate):
        """Map trip.stop_times to GTFS-RT StopTimeUpdate dicts, carrying the
        MFDZ stop_time_properties extension for pickup/dropoff semantics."""
        return [{
            'stopSequence': stoptime.stop_sequence,
            'arrival': {
                'time': self._to_seconds(fromdate, stoptime.arrival_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'departure': {
                'time': self._to_seconds(fromdate, stoptime.departure_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'stopId': stoptime.stop_id,
            'scheduleRelationship': 'SCHEDULED',
            'stop_time_properties': {
                '[transit_realtime.stop_time_properties]': {
                    'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
                    'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
                }
            }
        }
        for stoptime in trip.stop_times]
    def _as_added_updates(self, trip, fromdate):
        """One ADDED TripUpdate dict (with full stop time list and the MFDZ
        trip_descriptor extension) per upcoming service date of trip."""
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'ADDED',
                'routeId': trip.trip_id,
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl' : trip.url,
                    'agencyId' : trip.agency,
                    'route_long_name' : trip.route_long_name(),
                    'route_type': RIDESHARING_ROUTE_TYPE
                }
            },
            'stopTimeUpdate': self._to_stop_times(trip, trip_date)
        } for trip_date in trip.next_trip_dates(fromdate)]

View file

@ -0,0 +1,14 @@
# Constants used when exporting carpool offers as GTFS / GTFS-RT.
# trips.txt bikes_allowed value: 2 = no bikes allowed.
NO_BIKES_ALLOWED = 2
# Extended GTFS route type used for ridesharing trips.
RIDESHARING_ROUTE_TYPE = 1551
# calendar_dates.txt exception_type values.
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
# stop_times.txt pickup_type / drop_off_type values.
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
# stop_times.txt timepoint values.
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1
# Default uncertainty (seconds) attached to GTFS-RT stop time events.
# NOTE(review): "UNCERTAINITY" is a misspelling of "UNCERTAINTY"; keep the
# name as-is because other modules reference it.
MFDZ_DEFAULT_UNCERTAINITY = 600

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
_MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
_MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)

View file

@ -2,10 +2,10 @@ import logging
from typing import List from typing import List
import requests import requests
from amarillo.models.Carpool import Carpool, StopTime from amarillo.app.models.Carpool import Carpool, StopTime
from amarillo.services.config import config from amarillo.app.services.config import config
from amarillo.services.secrets import secrets from amarillo.app.services.secrets import secrets
import re import re
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -2,14 +2,15 @@ import json
from glob import glob from glob import glob
from typing import Dict from typing import Dict
from amarillo.models.Carpool import Region from amarillo.app.models.Carpool import Region
class RegionService: class RegionService:
def __init__(self): def __init__(self):
self.regions: Dict[str, Region] = {} self.regions: Dict[str, Region] = {}
for region_file_name in glob('data/region/*.json'):
for region_file_name in glob('conf/region/*.json'):
with open(region_file_name) as region_file: with open(region_file_name) as region_file:
dict = json.load(region_file) dict = json.load(region_file)
region = Region(**dict) region = Region(**dict)

View file

@ -0,0 +1,47 @@
import requests
import logging
logger = logging.getLogger(__name__)
class RoutingException(Exception):
    """Raised when a request to the routing backend fails."""

    def __init__(self, message):
        # Delegate to Exception so str(exc) yields the message unchanged.
        super().__init__(message)
class RoutingService():
    """Thin client for a GraphHopper routing endpoint."""

    def __init__(self, gh_url = 'https://api.mfdz.de/gh'):
        # Base URL of the GraphHopper instance (no trailing slash).
        self.gh_service_url = gh_url

    def path_for_stops(self, points):
        """Return the first GraphHopper path traversing the given points,
        or {} if no path was returned.

        Robustness fix: previously `len(directions.get("paths"))` raised
        TypeError when the response carried no "paths" key.
        """
        directions = self._get_directions(points)
        paths = directions.get("paths") if directions else None
        if paths:
            return paths[0]
        return {}

    def _get_directions(self, points):
        """Request directions for points; returns the decoded JSON response.

        Raises RoutingException on any non-200 status, preferring the
        backend's own error message when the body is parseable.
        """
        req_url = self._create_url(points, True, True)
        logger.debug("Get directions via: {}".format(req_url))
        response = requests.get(req_url)
        status = response.status_code
        if status == 200:
            # Found route between points
            return response.json()
        # Error path. Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) to `except Exception`.
        try:
            message = response.json().get('message')
        except Exception:
            raise RoutingException("Get directions failed with status code {}".format(status))
        raise RoutingException(message)

    def _create_url(self, points, calc_points = False, instructions = False):
        """Build the GraphHopper /route request URL for the given points
        (objects with .x = lon, .y = lat)."""
        locations = ""
        for point in points:
            locations += "point={0}%2C{1}&".format(point.y, point.x)
        return "{0}/route?{1}instructions={2}&calc_points={3}&points_encoded=false".format(
            self.gh_service_url, locations, instructions, calc_points)

View file

@ -1,10 +1,12 @@
from pydantic import Field, ConfigDict from typing import Dict
from pydantic import Field
from pydantic_settings import BaseSettings from pydantic_settings import BaseSettings
# Example: secrets = { "mfdz": "some secret" } # Example: secrets = { "mfdz": "some secret" }
class Secrets(BaseSettings): class Secrets(BaseSettings):
model_config = ConfigDict(extra='allow')
ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN') ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN')
secret_key: str = Field(None, env = 'SECRET_KEY') metrics_user: str = Field(None, env = 'METRICS_USER')
metrics_password: str = Field(None, env = 'METRICS_PASSWORD')
# Read if file exists, otherwise no error (it's in .gitignore) # Read if file exists, otherwise no error (it's in .gitignore)

View file

@ -0,0 +1,182 @@
import csv
import geopandas as gpd
import pandas as pd
from amarillo.app.models.Carpool import StopTime
from contextlib import closing
from shapely.geometry import Point, LineString
from shapely.ops import transform
from pyproj import Proj, Transformer
import re
import requests
from io import TextIOWrapper
import codecs
import logging
logger = logging.getLogger(__name__)
class StopsStore():
    """Loads stops (bus stops, carpool parkings, ...) from configured sources
    and finds registered stops near a trip's path or a single coordinate.
    All distance work happens in a metric projection (default EPSG:32632)."""
    def __init__(self, stop_sources = [], internal_projection = "EPSG:32632"):
        # NOTE(review): mutable default argument [] is shared across instances
        # that don't pass stop_sources — confirm this is intended.
        self.internal_projection = internal_projection
        # Transform from WGS84 lon/lat into the metric internal projection.
        self.projection = Transformer.from_crs("EPSG:4326", internal_projection, always_xy=True).transform
        self.stopsDataFrames = []
        self.stop_sources = stop_sources
    def load_stop_sources(self):
        """Imports stops from stop_sources and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m.
        Subsequent calls of load_stop_sources will reload all stop_sources
        but replace the current stops only if all stops could be loaded successfully.
        """
        stopsDataFrames = []
        error_occured = False
        for stops_source in self.stop_sources:
            try:
                stopsDataFrame =self._load_stops(stops_source["url"])
                stopsDataFrames.append({'distanceInMeter': stops_source["vicinity"],
                    'stops': stopsDataFrame})
            except Exception as err:
                error_occured = True
                logger.error("Failed to load stops from %s to StopsStore.", stops_source["url"], exc_info=True)
        # Atomic swap: keep the previous stops if any source failed to load.
        if not error_occured:
            self.stopsDataFrames = stopsDataFrames
    def find_additional_stops_around(self, line, stops = None):
        """Returns a GeoDataFrame with all stops in vicinity of the
        given line, sorted by distance from origin of the line.
        Note: for internal projection/distance calculations, the
        lat/lon geometries of line and stops are converted to
        the metric internal projection.
        """
        stops_frames = []
        if stops:
            stops_frames.append(self._convert_to_dataframe(stops))
        transformedLine = transform(self.projection, LineString(line.coordinates))
        # Each registered source is matched with its own vicinity distance.
        for stops_to_match in self.stopsDataFrames:
            stops_frames.append(self._find_stops_around_transformed(stops_to_match['stops'], transformedLine, stops_to_match['distanceInMeter']))
        stops = gpd.GeoDataFrame( pd.concat(stops_frames, ignore_index=True, sort=True))
        if not stops.empty:
            self._sort_by_distance(stops, transformedLine)
        return stops
    def find_closest_stop(self, carpool_stop, max_search_distance):
        """Return the registered stop closest to carpool_stop within
        max_search_distance (meters), or carpool_stop itself if none is near."""
        transformedCoord = Point(self.projection(carpool_stop.lon, carpool_stop.lat))
        best_dist = max_search_distance + 1
        best_stop = None
        for stops_with_dist in self.stopsDataFrames:
            stops = stops_with_dist['stops']
            s, d = stops.sindex.nearest(transformedCoord, return_all= True, return_distance=True, max_distance=max_search_distance)
            if len(d) > 0 and d[0] < best_dist:
                best_dist = d[0]
                row = s[1][0]
                best_stop = StopTime(name=stops.at[row, 'stop_name'], lat=stops.at[row, 'y'], lon=stops.at[row, 'x'])
        return best_stop if best_stop else carpool_stop
    def _normalize_stop_name(self, stop_name):
        """Normalize P+R naming variants; empty or generic names become
        the default 'P+R-Parkplatz'."""
        default_name = 'P+R-Parkplatz'
        if stop_name in ('', 'Park&Ride'):
            return default_name
        normalized_stop_name = re.sub(r"P(ark)?\s?[\+&]\s?R(ail|ide)?",'P+R', stop_name)
        return normalized_stop_name
    def _load_stops(self, source : str):
        """Loads stops from given source and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m
        """
        logger.info("Load stops from %s", source)
        # http(s) sources ending in 'json' are treated as GeoJSON, all other
        # http(s) sources and local files as semicolon-separated CSV.
        if source.startswith('http'):
            if source.endswith('json'):
                with requests.get(source) as json_source:
                    stopsDataFrame = self._load_stops_geojson(json_source.json())
            else:
                with requests.get(source) as csv_source:
                    stopsDataFrame = self._load_stops_csv(codecs.iterdecode(csv_source.iter_lines(), 'utf-8'))
        else:
            with open(source, encoding='utf-8') as csv_source:
                stopsDataFrame = self._load_stops_csv(csv_source)
        return stopsDataFrame
    def _load_stops_csv(self, csv_source):
        """Parse a ';'-delimited CSV with stop_id/stop_lat/stop_lon/stop_name
        columns into a GeoDataFrame (decimal commas are accepted)."""
        id = []
        lat = []
        lon = []
        stop_name = []
        reader = csv.DictReader(csv_source, delimiter=';')
        columns = ['stop_id', 'stop_lat', 'stop_lon', 'stop_name']
        lists = [id, lat, lon, stop_name]
        for row in reader:
            for col, lst in zip(columns, lists):
                if col == "stop_lat" or col == "stop_lon":
                    lst.append(float(row[col].replace(",",".")))
                elif col == "stop_name":
                    row_stop_name = self._normalize_stop_name(row[col])
                    lst.append(row_stop_name)
                else:
                    lst.append(row[col])
        return self._as_dataframe(id, lat, lon, stop_name)
    def _load_stops_geojson(self, geojson_source):
        """Parse a GeoJSON FeatureCollection into a GeoDataFrame; features
        lacking coordinates or a 'name' property are logged and skipped."""
        id = []
        lat = []
        lon = []
        stop_name = []
        columns = ['stop_id', 'stop_lat', 'stop_lon', 'stop_name']
        lists = [id, lat, lon, stop_name]
        for row in geojson_source['features']:
            coord = row['geometry']['coordinates']
            if not coord or not row['properties'].get('name'):
                logger.error('Stop feature {} has null coord or name'.format(row['id']))
                continue
            for col, lst in zip(columns, lists):
                if col == "stop_lat":
                    lst.append(coord[1])
                elif col == "stop_lon":
                    lst.append(coord[0])
                elif col == "stop_name":
                    row_stop_name = self._normalize_stop_name(row['properties']['name'])
                    lst.append(row_stop_name)
                elif col == "stop_id":
                    lst.append(row['id'])
        return self._as_dataframe(id, lat, lon, stop_name)
    def _as_dataframe(self, id, lat, lon, stop_name):
        """Assemble parallel lists into a GeoDataFrame reprojected to the
        internal metric projection."""
        df = gpd.GeoDataFrame(data={'x':lon, 'y':lat, 'stop_name':stop_name, 'id':id})
        stopsGeoDataFrame = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.x, df.y, crs='EPSG:4326'))
        stopsGeoDataFrame.to_crs(crs=self.internal_projection, inplace=True)
        return stopsGeoDataFrame
    def _find_stops_around_transformed(self, stopsDataFrame, transformedLine, distance):
        """Stops within `distance` meters of the (already projected) line,
        pre-filtered via the spatial index before the exact intersects test."""
        bufferedLine = transformedLine.buffer(distance)
        sindex = stopsDataFrame.sindex
        possible_matches_index = list(sindex.intersection(bufferedLine.bounds))
        possible_matches = stopsDataFrame.iloc[possible_matches_index]
        exact_matches = possible_matches[possible_matches.intersects(bufferedLine)]
        return exact_matches
    def _convert_to_dataframe(self, stops):
        """Convert carpool stop objects (with name/lon/lat/id) into a
        GeoDataFrame in the internal projection."""
        return gpd.GeoDataFrame([[stop.name, stop.lon, stop.lat,
            stop.id, Point(self.projection(stop.lon, stop.lat))] for stop in stops], columns = ['stop_name','x','y','id','geometry'], crs=self.internal_projection)
    def _sort_by_distance(self, stops, transformedLine):
        """Sort stops in place by their projected distance along the line."""
        stops['distance']=stops.apply(lambda row: transformedLine.project(row['geometry']), axis=1)
        stops.sort_values('distance', inplace=True)
def is_carpooling_stop(stop_id, name):
    """Heuristically decide whether a stop is a carpooling stop.

    Stops with an mfdz: or bbnavi: id prefix are custom stops that are
    explicitly meant to be carpooling stops; otherwise fall back to matching
    the stop name ("mitfahr...", "p&m").
    """
    lowered_name = name.lower()
    if stop_id.startswith('mfdz:') or stop_id.startswith('bbnavi:'):
        return True
    return 'mitfahr' in lowered_name or 'p&m' in lowered_name

View file

@ -0,0 +1,374 @@
from amarillo.app.models.gtfs import GtfsTimeDelta, GtfsStopTime
from amarillo.app.models.Carpool import MAX_STOPS_PER_TRIP, Carpool, Weekday, StopTime, PickupDropoffType
from amarillo.app.services.gtfs_constants import *
from amarillo.app.services.routing import RoutingService, RoutingException
from amarillo.app.services.stops import is_carpooling_stop
from amarillo.app.utils.utils import assert_folder_exists, is_older_than_days, yesterday, geodesic_distance_in_m
from shapely.geometry import Point, LineString, box
from geojson_pydantic.geometries import LineString as GeoJSONLineString
from datetime import datetime, timedelta
import numpy as np
import os
import json
import logging
logger = logging.getLogger(__name__)
class Trip:
    """In-memory representation of a carpool offer as a GTFS-like trip.

    `calendar` is either a set of Weekday values (a regularly running trip)
    or a single date (a one-off trip).
    """
    def __init__(self, trip_id, route_name, headsign, url, calendar, departureTime, path, agency, lastUpdated, stop_times, bbox):
        if isinstance(calendar, set):
            self.runs_regularly = True
            self.weekdays = [
                1 if Weekday.monday in calendar else 0,
                1 if Weekday.tuesday in calendar else 0,
                1 if Weekday.wednesday in calendar else 0,
                1 if Weekday.thursday in calendar else 0,
                1 if Weekday.friday in calendar else 0,
                1 if Weekday.saturday in calendar else 0,
                1 if Weekday.sunday in calendar else 0,
            ]
            # NOTE(review): start_in_day is computed but never stored or used —
            # possibly meant to be self.start_in_day; confirm before removing.
            start_in_day = self._total_seconds(departureTime)
        else:
            self.start = datetime.combine(calendar, departureTime)
            self.runs_regularly = False
            self.weekdays = [0,0,0,0,0,0,0]
        self.start_time = departureTime
        self.path = path
        self.trip_id = trip_id
        self.url = url
        self.agency = agency
        self.stops = []
        self.lastUpdated = lastUpdated
        self.stop_times = stop_times
        self.bbox = bbox
        self.route_name = route_name
        self.trip_headsign = headsign

    def path_as_line_string(self):
        """Return the trip's path geometry.

        BUG fix: previously `return path` — an unqualified name that raised
        NameError at call time; the instance attribute was intended.
        """
        return self.path

    def _total_seconds(self, instant):
        """Seconds since midnight for a time-of-day value."""
        return instant.hour * 3600 + instant.minute * 60 + instant.second

    def start_time_str(self):
        """Departure time formatted as HH:MM:SS."""
        return self.start_time.strftime("%H:%M:%S")

    def next_trip_dates(self, start_date, day_count=14):
        """Yield upcoming service dates as YYYYMMDD strings: for regular
        trips, matching weekdays within the next day_count days; for one-off
        trips, the single start date."""
        if self.runs_regularly:
            for single_date in (start_date + timedelta(n) for n in range(day_count)):
                if self.weekdays[single_date.weekday()]==1:
                    yield single_date.strftime("%Y%m%d")
        else:
            yield self.start.strftime("%Y%m%d")

    def route_long_name(self):
        """The route's display name."""
        return self.route_name

    def intersects(self, bbox):
        """True if this trip's bounding box intersects bbox=(minx, miny, maxx, maxy)."""
        return self.bbox.intersects(box(*bbox))
class TripStore():
    """
    TripStore maintains the currently valid trips. A trip is a
    carpool offer enhanced with all stops this

    Attributes:
        trips           Dict of currently valid trips.
        deleted_trips   Dict of recently deleted trips.
    """

    def __init__(self, stops_store):
        self.transformer = TripTransformer(stops_store)
        self.stops_store = stops_store
        self.trips = {}
        self.deleted_trips = {}
        self.recent_trips = {}

    def put_carpool(self, carpool: Carpool):
        """
        Adds carpool to the TripStore: enhances it (or reuses a stored,
        unchanged enhancement), persists the enhanced version under
        data/enhanced/ and registers it as a Trip. Returns the Trip, or
        None when enhancement failed.
        """
        filename = f'data/enhanced/{carpool.agency}/{carpool.id}.json'
        try:
            existing_carpool = self._load_carpool_if_exists(carpool.agency, carpool.id)
            if existing_carpool and existing_carpool.lastUpdated == carpool.lastUpdated:
                # Unchanged since the stored enhancement: reuse it.
                enhanced_carpool = existing_carpool
            else:
                # Reject degenerate offers: fewer than 2 stops, or origin and
                # destination less than 1 km apart.
                if len(carpool.stops) < 2 or self.distance_in_m(carpool) < 1000:
                    logger.warning("Failed to add carpool %s:%s to TripStore, distance too low", carpool.agency, carpool.id)
                    self.handle_failed_carpool_enhancement(carpool)
                    return
                enhanced_carpool = self.transformer.enhance_carpool(carpool)
                # TODO should only store enhanced_carpool, if it has 2 or more stops
            assert_folder_exists(f'data/enhanced/{carpool.agency}/')
            with open(filename, 'w', encoding='utf-8') as f:
                f.write(enhanced_carpool.json())
            logger.info("Added enhanced carpool %s:%s", carpool.agency, carpool.id)
            return self._load_as_trip(enhanced_carpool)
        except RoutingException as err:
            logger.warning("Failed to add carpool %s:%s to TripStore due to RoutingException %s", carpool.agency, carpool.id, getattr(err, 'message', repr(err)))
            self.handle_failed_carpool_enhancement(carpool)
        except Exception as err:
            logger.error("Failed to add carpool %s:%s to TripStore.", carpool.agency, carpool.id, exc_info=True)
            self.handle_failed_carpool_enhancement(carpool)

    def handle_failed_carpool_enhancement(self, carpool: Carpool):
        """Persist a carpool that could not be enhanced under data/failed/."""
        # Fix: parameter was misspelled `sellf` in the original.
        assert_folder_exists(f'data/failed/{carpool.agency}/')
        with open(f'data/failed/{carpool.agency}/{carpool.id}.json', 'w', encoding='utf-8') as f:
            f.write(carpool.json())

    def distance_in_m(self, carpool):
        """Geodesic distance between the carpool's first and last stop;
        0 when there are fewer than two stops."""
        if len(carpool.stops) < 2:
            return 0
        s1 = carpool.stops[0]
        s2 = carpool.stops[-1]
        return geodesic_distance_in_m((s1.lon, s1.lat), (s2.lon, s2.lat))

    def recently_added_trips(self):
        """Trips added/updated within the last day (see `unflag_unrecent_updates`)."""
        return list(self.recent_trips.values())

    def recently_deleted_trips(self):
        """Trips deleted recently and not yet expired from the delta feed."""
        return list(self.deleted_trips.values())

    def _load_carpool_if_exists(self, agency_id: str, carpool_id: str):
        """Load the stored enhanced carpool, or None when absent/unreadable."""
        if carpool_exists(agency_id, carpool_id, 'data/enhanced'):
            try:
                return load_carpool(agency_id, carpool_id, 'data/enhanced')
            except Exception as e:
                # An error on restore could be caused by model changes,
                # in such a case, it need's to be recreated
                logger.warning("Could not restore enhanced trip %s:%s, reason: %s", agency_id, carpool_id, repr(e))
        return None

    def _load_as_trip(self, carpool: Carpool):
        """Transform an enhanced carpool into a Trip and register it."""
        trip = self.transformer.transform_to_trip(carpool)
        trip_id = trip.trip_id
        self.trips[trip_id] = trip
        if not is_older_than_days(carpool.lastUpdated, 1):
            self.recent_trips[trip_id] = trip
        logger.debug("Added trip %s", trip_id)
        return trip

    def delete_carpool(self, agency_id: str, carpool_id: str):
        """
        Deletes carpool from the TripStore.
        """
        agencyScopedCarpoolId = f"{agency_id}:{carpool_id}"
        trip_to_be_deleted = self.trips.get(agencyScopedCarpoolId)
        if trip_to_be_deleted:
            self.deleted_trips[agencyScopedCarpoolId] = trip_to_be_deleted
            del self.trips[agencyScopedCarpoolId]
        if self.recent_trips.get(agencyScopedCarpoolId):
            del self.recent_trips[agencyScopedCarpoolId]
        if carpool_exists(agency_id, carpool_id):
            remove_carpool_file(agency_id, carpool_id)
        # Fix: the original logged the builtin `id` here.
        logger.debug("Deleted trip %s", agencyScopedCarpoolId)

    def unflag_unrecent_updates(self):
        """
        Trips that were last updated before yesterday, are not recent
        any longer. As no updates need to be sent for them any longer,
        they will be removed from recent recent_trips and deleted_trips.
        """
        for key in list(self.recent_trips):
            t = self.recent_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.recent_trips[key]
        for key in list(self.deleted_trips):
            t = self.deleted_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.deleted_trips[key]
class TripTransformer:
    """Enhances raw carpool offers (snapping stops, routing a path, adding
    virtual stops along the route) and converts them into Trip objects."""

    # When True, the offered carpool stops are replaced by the closest known
    # transit stop within the search radius below.
    REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS = True
    # NOTE(review): name keeps the original "SERACH" typo, as it is a public
    # class attribute that external code may reference.
    REPLACEMENT_STOPS_SERACH_RADIUS_IN_M = 1000
    # Simplification tolerance (in degrees) for route geometries.
    SIMPLIFY_TOLERANCE = 0.0001

    router = RoutingService()

    def __init__(self, stops_store):
        self.stops_store = stops_store

    def transform_to_trip(self, carpool):
        """Build a Trip (with stop times and bounding box) from an already
        enhanced carpool."""
        stop_times = self._convert_stop_times(carpool)
        route_name = carpool.stops[0].name + " nach " + carpool.stops[-1].name
        headsign = carpool.stops[-1].name
        trip_id = self._trip_id(carpool)
        path = carpool.path
        # Bounding box over all path coordinates (x = lon, y = lat).
        bbox = box(
            min([pt[0] for pt in path.coordinates]),
            min([pt[1] for pt in path.coordinates]),
            max([pt[0] for pt in path.coordinates]),
            max([pt[1] for pt in path.coordinates]))
        trip = Trip(trip_id, route_name, headsign, str(carpool.deeplink), carpool.departureDate, carpool.departureTime, carpool.path, carpool.agency, carpool.lastUpdated, stop_times, bbox)
        return trip

    def _trip_id(self, carpool):
        """Agency-scoped trip id, e.g. "mfdz:trip-1"."""
        return f"{carpool.agency}:{carpool.id}"

    def _replace_stops_by_transit_stops(self, carpool, max_search_distance):
        """Snap each carpool stop to the closest known stop within
        ``max_search_distance`` meters."""
        new_stops = []
        for carpool_stop in carpool.stops:
            new_stops.append(self.stops_store.find_closest_stop(carpool_stop, max_search_distance))
        return new_stops

    def enhance_carpool(self, carpool):
        """Route the carpool, simplify its path, add virtual stops along the
        route and estimate their times. Returns an enhanced copy."""
        if self.REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS:
            carpool.stops = self._replace_stops_by_transit_stops(carpool, self.REPLACEMENT_STOPS_SERACH_RADIUS_IN_M)
        path = self._path_for_ride(carpool)
        lineString_shapely_wgs84 = LineString(coordinates=path["points"]["coordinates"]).simplify(self.SIMPLIFY_TOLERANCE)
        lineString_wgs84 = GeoJSONLineString(type="LineString", coordinates=list(lineString_shapely_wgs84.coords))
        virtual_stops = self.stops_store.find_additional_stops_around(lineString_wgs84, carpool.stops)
        if not virtual_stops.empty:
            virtual_stops["time"] = self._estimate_times(path, virtual_stops['distance'])
            logger.debug("Virtual stops found: {}".format(virtual_stops))
        if len(virtual_stops) > MAX_STOPS_PER_TRIP:
            # In case we found more than MAX_STOPS_PER_TRIP, retain the first
            # and the last half of MAX_STOPS_PER_TRIP stops.
            # Fix: the former slice np.r_[0:half, half:] did not drop any rows.
            half = int(MAX_STOPS_PER_TRIP / 2)
            keep = np.r_[0:half, len(virtual_stops) - half:len(virtual_stops)]
            virtual_stops = virtual_stops.iloc[keep]
        trip_id = f"{carpool.agency}:{carpool.id}"
        stop_times = self._stops_and_stop_times(carpool.departureTime, trip_id, virtual_stops)
        enhanced_carpool = carpool.copy()
        enhanced_carpool.stops = stop_times
        enhanced_carpool.path = lineString_wgs84
        return enhanced_carpool

    def _convert_stop_times(self, carpool):
        """Convert the carpool's stops into GtfsStopTime records, mapping
        pickup/dropoff restrictions onto GTFS pickup/drop_off types."""
        stop_times = [GtfsStopTime(
            self._trip_id(carpool),
            stop.arrivalTime,
            stop.departureTime,
            stop.id,
            seq_nr + 1,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_dropoff else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_pickup else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_TIMEPOINT_APPROXIMATE)
            for seq_nr, stop in enumerate(carpool.stops)]
        return stop_times

    def _path_for_ride(self, carpool):
        """Ask the routing service for a path through all announced stops."""
        points = self._stop_coords(carpool.stops)
        return self.router.path_for_stops(points)

    def _stop_coords(self, stops):
        # Retrieve coordinates of all officially announced stops (start, intermediate, target)
        return [Point(stop.lon, stop.lat) for stop in stops]

    def _estimate_times(self, path, distances_from_start):
        """Interpolate arrival times for stops at the given distances along
        ``path``, using the router's per-instruction distance/time pairs.

        Raises RoutingException when the current instruction has zero length.
        """
        cumulated_distance = 0
        cumulated_time = 0
        stop_times = []
        instructions = path["instructions"]
        cnt = 0
        for distance in distances_from_start:
            # Consume instructions fully traversed before this stop.
            while cnt < len(instructions) and cumulated_distance + instructions[cnt]["distance"] < distance:
                cumulated_distance = cumulated_distance + instructions[cnt]["distance"]
                cumulated_time = cumulated_time + instructions[cnt]["time"]
                cnt = cnt + 1
            if cnt < len(instructions):
                if instructions[cnt]["distance"] == 0:
                    raise RoutingException("Origin and destination too close")
                # Interpolate linearly within the current instruction.
                percent_dist = (distance - cumulated_distance) / instructions[cnt]["distance"]
                stop_time = cumulated_time + percent_dist * instructions[cnt]["time"]
                stop_times.append(stop_time)
            else:
                logger.debug("distance {} exceeds total length {}, using max arrival time {}".format(distance, cumulated_distance, cumulated_time))
                stop_times.append(cumulated_time)
        return stop_times

    def _stops_and_stop_times(self, start_time, trip_id, stops_frame):
        """Build StopTime records from the stops dataframe, skipping stops
        that are too close to their neighbors."""
        # Assumptions:
        # arrival_time = departure_time
        # pickup_type, drop_off_type for origin: = coordinate/none
        # pickup_type, drop_off_type for destination: = none/coordinate
        # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners)
        number_of_stops = len(stops_frame.index)
        total_distance = stops_frame.iloc[number_of_stops - 1]["distance"]
        first_stop_time = GtfsTimeDelta(hours=start_time.hour, minutes=start_time.minute, seconds=start_time.second)
        stop_times = []
        for i in range(0, number_of_stops):
            current_stop = stops_frame.iloc[i]
            if not current_stop.id:
                continue
            elif i == 0:
                if (stops_frame.iloc[1].time - current_stop.time) < 1000:
                    # skip custom stop if there is an official stop very close by
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            else:
                # NOTE(review): `not i==1` exempts the second stop from the
                # proximity check — presumably because the first may have been
                # skipped; confirm intent.
                if (current_stop.time - stops_frame.iloc[i - 1].time) < 5000 and not i == 1 and not is_carpooling_stop(current_stop.id, current_stop.stop_name):
                    # skip latter stop if it's very close (<5 seconds drive) by the preceding
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            trip_time = timedelta(milliseconds=int(current_stop.time))
            is_dropoff = self._is_dropoff_stop(current_stop, total_distance)
            is_pickup = self._is_pickup_stop(current_stop, total_distance)
            # TODO would be nice if possible to publish a minimum shared distance
            pickup_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_pickup else STOP_TIMES_STOP_TYPE_NONE
            dropoff_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_dropoff else STOP_TIMES_STOP_TYPE_NONE
            if is_pickup and not is_dropoff:
                pickup_dropoff = PickupDropoffType.only_pickup
            elif not is_pickup and is_dropoff:
                pickup_dropoff = PickupDropoffType.only_dropoff
            else:
                pickup_dropoff = PickupDropoffType.pickup_and_dropoff
            next_stop_time = first_stop_time + trip_time
            stop_times.append(StopTime(**{
                'arrivalTime': str(next_stop_time),
                'departureTime': str(next_stop_time),
                'id': current_stop.id,
                'pickup_dropoff': pickup_dropoff,
                'name': str(current_stop.stop_name),
                'lat': current_stop.y,
                'lon': current_stop.x
            }))
        return stop_times

    def _is_dropoff_stop(self, current_stop, total_distance):
        """Stops in the second half of the route allow dropoff."""
        return current_stop["distance"] >= 0.5 * total_distance

    def _is_pickup_stop(self, current_stop, total_distance):
        """Stops in the first half of the route allow pickup."""
        return current_stop["distance"] < 0.5 * total_distance
def load_carpool(agency_id: str, carpool_id: str, folder: str = 'data/enhanced') -> Carpool:
    """Load and deserialize the carpool stored at folder/agency_id/carpool_id.json."""
    carpool_path = f'{folder}/{agency_id}/{carpool_id}.json'
    with open(carpool_path, 'r', encoding='utf-8') as carpool_file:
        payload = json.load(carpool_file)
        return Carpool(**payload)
def carpool_exists(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Return True when the carpool's JSON file is present below `folder`."""
    carpool_path = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.path.exists(carpool_path)
def remove_carpool_file(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Delete the carpool's JSON file; raises FileNotFoundError when absent."""
    target = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.remove(target)

View file

@ -1,4 +1,4 @@
from amarillo.models.Carpool import Carpool, StopTime, Weekday from amarillo.app.models.Carpool import Carpool, StopTime, Weekday
# TODO use meanigful values for id and lat, lon # TODO use meanigful values for id and lat, lon
stops_1234 = [ stops_1234 = [

View file

View file

@ -0,0 +1,40 @@
import os
import re
from datetime import datetime, date, timedelta
from pyproj import Geod
def assert_folder_exists(foldername):
    """Ensure `foldername` exists as a directory, creating parents as needed."""
    os.makedirs(foldername, exist_ok=True)
def agency_carpool_ids_from_filename(carpool_filename):
    """
    Extract (agency_id, carpool_id) from a carpool file path.

    The path must end in /<agency_id>/<carpool_id>.json where both ids
    consist of [a-zA-Z0-9_-]. Returns (None, None) when it does not match.
    """
    match = re.search(r'\/([a-zA-Z0-9_-]+)\/([a-zA-Z0-9_-]+)\.json$', carpool_filename)
    return (match[1], match[2]) if match else (None, None)
def is_older_than_days(date_to_check, number_of_days):
    """True when `date_to_check` (date, datetime or None) lies before today
    minus `number_of_days`; None always counts as older."""
    if date_to_check is None:
        return True
    value = date_to_check.date() if isinstance(date_to_check, datetime) else date_to_check
    return value < date.today() - timedelta(days=number_of_days)
def yesterday():
    """The calendar date immediately before today."""
    return date.today() - timedelta(days=1)
def date_days_ago(number_of_days):
    """Today's date shifted `number_of_days` into the past."""
    delta = timedelta(days=number_of_days)
    return date.today() - delta
def geodesic_distance_in_m(coord1, coord2):
    """Geodesic distance in meters on the WGS84 ellipsoid between two
    (lon, lat) coordinate pairs."""
    lon1, lat1 = coord1
    lon2, lat2 = coord2
    return Geod(ellps="WGS84").line_lengths([lon1, lon2], [lat1, lat2])[0]

View file

@ -1,38 +0,0 @@
from typing import Annotated, Optional, List
from pydantic import ConfigDict, BaseModel, Field
class User(BaseModel):
    """Account configuration: id, optional API key, optional password hash
    and a list of permission strings."""
    #TODO: add attributes admin, permissions, fullname, email
    # NOTE(review): described as an agency id although the field is user_id;
    # the example config below also says "agency_id" — confirm naming.
    user_id: str = Field(
        description="ID of the agency that uses this token.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])
    # Optional static API key as an alternative to JWT-based login.
    api_key: Optional[str] = Field(None,
        description="The agency's API key for using the API",
        min_length=20,
        max_length=256,
        pattern=r'^[a-zA-Z0-9]+$',
        examples=["d8yLuY4DqMEUCLcfJASi"])
    # Password hash (the example is a bcrypt hash) used for JWT token generation.
    password: Optional[str] = Field(None,
        description="The agency's password for generating JWT tokens",
        min_length=8,
        max_length=256,
        examples=["$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"])
    # Each entry is "<agency>:<operation>" or a bare "<operation>".
    permissions: Optional[List[Annotated[str, Field(pattern=r'^[a-z0-9-]+(:[a-z]+)?$')]]] = Field([],
        description="The permissions of this user, a list of strings in the format <agency:operation> or <operation>",
        max_length=256,
        # pattern=r'^[a-zA-Z0-9]+(:[a-zA-Z]+)?$', #TODO
        examples=["ride2go:read", "all:read", "admin", "geojson"])
    model_config = ConfigDict(json_schema_extra={
        "title": "Agency Configuration",
        "description": "Configuration for an agency.",
        "example":
            {
                "agency_id": "mfdz",
                "api_key": "d8yLuY4DqMEUCLcfJASi",
                "password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"
            }
        })

View file

@ -0,0 +1,121 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Test notebook for discovering and importing plugins"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'amarillo.plugins.metrics': <module 'amarillo.plugins.metrics' from '/home/user/amarillo/amarillo-plugins/amarillo-metrics/amarillo/plugins/metrics/__init__.py'>}"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import importlib\n",
"import pkgutil\n",
"\n",
"import amarillo.plugins # FIXME this namespace does not exist if there are 0 plugins installed\n",
"\n",
"def iter_namespace(ns_pkg):\n",
" # Source: https://packaging.python.org/guides/creating-and-discovering-plugins/\n",
" return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + \".\")\n",
"\n",
"discovered_plugins = {\n",
" name: importlib.import_module(name)\n",
" for finder, name, ispkg\n",
" in iter_namespace(amarillo.plugins)\n",
"}\n",
"\n",
"discovered_plugins"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['__name__',\n",
" '__doc__',\n",
" '__package__',\n",
" '__loader__',\n",
" '__spec__',\n",
" '__path__',\n",
" '__file__',\n",
" '__cached__',\n",
" '__builtins__',\n",
" 'metrics',\n",
" 'json',\n",
" 'logging',\n",
" 'os',\n",
" 'random',\n",
" 'Callable',\n",
" 'APIRouter',\n",
" 'HTTPException',\n",
" 'Depends',\n",
" 'Request',\n",
" 'datetime',\n",
" 'generate_latest',\n",
" 'Gauge',\n",
" 'Counter',\n",
" 'Info',\n",
" 'FastAPI',\n",
" 'HTTPBasic',\n",
" 'HTTPBasicCredentials',\n",
" 'PlainTextResponse',\n",
" 'secrets',\n",
" 'logger',\n",
" 'security',\n",
" 'amarillo_trips_number_total',\n",
" 'router']"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"metrics = discovered_plugins['amarillo.plugins.metrics']\n",
"\n",
"metrics.__dir__()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -1,55 +0,0 @@
import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Region
from amarillo.services.regions import RegionService
from amarillo.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/region",
tags=["region"]
)
@router.get("/",
    operation_id="getRegions",
    summary="Return all regions",
    response_model=List[Region],
    responses={
    },
)
async def get_regions() -> List[Region]:
    """Return every configured region."""
    region_service: RegionService = container['regions']
    return [region for region in region_service.regions.values()]
@router.get("/{region_id}",
    operation_id="getRegionById",
    summary="Find region by ID",
    response_model=Region,
    description="Find region by ID",
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
    },
)
async def get_region(region_id: str) -> Region:
    """Return the region with the given id; 404 when unknown."""
    found_region = _assert_region_exists(region_id)
    logger.info(f"Get region {region_id}.")
    return found_region
def _assert_region_exists(region_id: str) -> Region:
    """Return the region for `region_id` or raise a 404 HTTPException."""
    regions: RegionService = container['regions']
    region = regions.get_region(region_id)
    if region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return region

View file

@ -1,71 +0,0 @@
import logging
from typing import List
from fastapi import APIRouter, HTTPException, status, Header, Depends
from amarillo.models.User import User
from amarillo.services.users import UserService
from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.services.config import config
from amarillo.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/users",
tags=["users"]
)
# This endpoint is not shown in PROD installations, only in development
# TODO make this an explicit config option
include_in_schema = config.env != 'PROD'
@router.get("/",
    include_in_schema=include_in_schema,
    operation_id="getUserIdsWhichHaveAConfiguration",
    summary="Get user which have a configuration",
    response_model=List[str],
    description="Returns the user_ids but not the details.",
    status_code=status.HTTP_200_OK)
async def get_user_ids(requesting_user: User = Depends(get_current_user)) -> List[str]:
    """Return the ids of all users that have a stored configuration.

    Fix: the return annotation was the list literal ``[str]``; it must be
    ``List[str]`` (already imported in this module).
    """
    return container['users'].get_user_ids()
@router.post("/",
    include_in_schema=include_in_schema,
    operation_id="postNewUserConf",
    summary="Post a new User")
async def post_user_conf(user_conf: User, requesting_user: User = Depends(get_current_user)):
    """Store a new user configuration; requires the "admin" permission."""
    verify_permission("admin", requesting_user)
    users: UserService = container['users']
    users.add(user_conf)
@router.delete("/{user_id}",
    include_in_schema=include_in_schema,
    operation_id="deleteUser",
    status_code=status.HTTP_200_OK,
    summary="Delete configuration of a user. Returns true if the token for the user existed, "
            "false if it didn't exist."
    )
async def delete_user(user_id: str, requesting_user: User = Depends(get_current_user)):
    """Delete a user's configuration.

    A user may delete their own configuration; users with the "admin"
    permission may delete any. Raises 403 on a permission failure and 400
    when no configuration exists for `user_id`.
    """
    user_may_delete_own = requesting_user.user_id == user_id
    admin_may_delete_everything = "admin" in requesting_user.permissions
    is_permitted = user_may_delete_own or admin_may_delete_everything
    if not is_permitted:
        message = f"User '{requesting_user.user_id} can not delete the configuration for {user_id}"
        logger.error(message)
        # Resolves the former "TODO 400->403": authorization failures are 403.
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=message)
    user_service: UserService = container['users']
    agency_exists = user_id in user_service.get_user_ids()
    if not agency_exists:
        message = f"No config for {user_id}"
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
    user_service.delete(user_id)

View file

@ -1,75 +0,0 @@
import time
# Fixture: a GTFS-RT-style TripUpdate dict for an ADDED carpool trip with two
# stop time updates (origin: pickup only, destination: dropoff only).
mock_added = {
    'trip': {
        'tripId': 'mifaz:carpool-update-123',
        'startTime': '07:33:00',
        'startDate': '20220509',
        'scheduleRelationship': 'ADDED',
        'routeId': 'mifaz:carpool-update-123',
        '[transit_realtime.trip_descriptor]': {
            'routeUrl' : 'http://myurl',
            'agencyId' : 'mifaz',
            'route_long_name' : 'Angermünde nach Biesenbrow'}
        },
    'stopTimeUpdate': [{
        'stopSequence': 1,
        'arrival': {
            # POSIX timestamp for 2022-05-09 07:33 local time
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'de:12073:900340108',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'NONE',
                'pickupType': 'COORDINATE_WITH_DRIVER'
            }
        }
    },
    {
        'stopSequence': 2,
        'arrival': {
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'mfdz:Ang001',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'COORDINATE_WITH_DRIVER',
                'pickupType': 'NONE'
            }
        }
    }]
}

# Fixture: feed entity wrapping the TripUpdate above.
mock_trip_updated_added = {
    'id': 'mifaz:carpool-update-123',
    'tripUpdate': mock_added
}

# Fixture: feed entity for a CANCELED (deleted) trip.
mock_trip_updated_deleted = {
    'id': 'carpool-update-124',
    'tripUpdate': {
        'trip': {
            'tripId': '141',
            'startTime': '17:01:08',
            'startDate': '20220509',
            'scheduleRelationship': 'CANCELED',
            'routeId': '141'
        }
    }
}

View file

@ -1,165 +0,0 @@
# OAuth2 authentication based on https://fastapi.tiangolo.com/tutorial/security/oauth2-jwt/#__tabbed_4_2
from datetime import datetime, timedelta, timezone
from typing import Annotated, Optional, Union
import logging
import logging.config
from fastapi import Depends, HTTPException, Header, status, APIRouter
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from jose import JWTError, jwt
from pydantic import BaseModel
from amarillo.models.User import User
from amarillo.services.passwords import verify_password
from amarillo.utils.container import container
from amarillo.services.agencies import AgencyService
from amarillo.services.users import UserService
from amarillo.models.Carpool import Agency
from amarillo.services.secrets import secrets
SECRET_KEY = secrets.secret_key
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 7*24*60
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main")
router = APIRouter()
class Token(BaseModel):
    """OAuth2 bearer-token response body returned by the /token endpoint."""
    access_token: str
    token_type: str
class TokenData(BaseModel):
    """Claims extracted from a decoded JWT; user_id is taken from `sub`."""
    user_id: Union[str, None] = None
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
async def verify_optional_api_key(X_API_Key: Optional[str] = Header(None)):
    """Resolve an optional X-API-Key header to a user id; None when absent.

    Fix: identity comparison `is None` instead of `== None`.
    """
    if X_API_Key is None:
        return None
    return await verify_api_key(X_API_Key)
def authenticate_user(user_id: str, password: str):
    """Return `user_id` when the password matches the stored hash, else False."""
    user_service: UserService = container['users']
    user_conf = user_service.user_id_to_user_conf.get(user_id)
    if user_conf and verify_password(password, user_conf.password):
        return user_id
    return False
def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None):
    """Encode `data` as a signed JWT with an `exp` claim.

    Lifetime is `expires_delta` when truthy, otherwise 15 minutes.
    """
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    to_encode = dict(data)
    to_encode["exp"] = datetime.now(timezone.utc) + lifetime
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
async def get_current_user(token: str = Depends(oauth2_scheme), user_from_api_key: str = Depends(verify_optional_api_key)) -> User:
    """Resolve the requesting User from either a bearer JWT or an X-API-Key.

    A presented JWT takes precedence over an API key; when neither is
    present a 401 is raised.
    """
    if token:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate OAuth2 credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
        try:
            # Raises JWTError on bad signature/claims; `sub` carries the user id.
            payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
            user_id: str = payload.get("sub")
            if user_id is None:
                raise credentials_exception
            token_data = TokenData(user_id=user_id)
        except JWTError:
            raise credentials_exception
        user_id = token_data.user_id
        if user_id is None:
            raise credentials_exception
        user_service : UserService = container['users']
        return user_service.get_user(user_id)
    elif user_from_api_key:
        # API-key path: verify_optional_api_key already validated the key.
        logger.info(f"API Key provided: {user_from_api_key}")
        user_service : UserService = container['users']
        return user_service.get_user(user_from_api_key)
    else:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"},
        )
        raise credentials_exception
def verify_permission(permission: str, user: User):
    """Raise 401 unless `user` holds `permission`.

    `permission` is either a bare operation ("geojson") or
    "<agency>:<operation>". On the user side, "all" acts as a wildcard for
    either part, and the "admin" permission passes every check.
    """
    def permissions_exception():
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=f"User '{user.user_id}' does not have the permission '{permission}'",
            headers={"WWW-Authenticate": "Bearer"},
        )
    # user is admin
    if "admin" in user.permissions: return
    # permission is an operation
    if ":" not in permission:
        if permission not in user.permissions:
            raise permissions_exception()
        return
    # permission is in agency:operation format
    def permission_matches(permission, user_permission):
        prescribed_agency, prescribed_operation = permission.split(":")
        given_agency, given_operation = user_permission.split(":")
        return (prescribed_agency == given_agency or given_agency == "all") and (prescribed_operation == given_operation or given_operation == "all")
    # Only user permissions containing ":" participate in the match.
    if any(permission_matches(permission, p) for p in user.permissions if ":" in p): return
    raise permissions_exception()
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_api_key(X_API_Key: str = Header(...)):
    """Validate the X-API-Key header and return the matching user id."""
    users: UserService = container['users']
    return users.check_api_key(X_API_Key)
@router.post("/token")
async def login_for_access_token(
    form_data: Annotated[OAuth2PasswordRequestForm, Depends()]
) -> Token:
    """Exchange username/password for a bearer JWT; 401 on bad credentials."""
    authenticated_id = authenticate_user(form_data.username, form_data.password)
    if not authenticated_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    issued_token = create_access_token(
        data={"sub": authenticated_id},
        expires_delta=timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES),
    )
    return Token(access_token=issued_token, token_type="bearer")
# TODO: eventually remove this
@router.get("/users/me/", response_model=Agency)
async def read_users_me(
    current_agency: Annotated[Agency, Depends(get_current_user)]
):
    """Return the agency configuration of the authenticated caller.

    NOTE(review): the parameter is annotated as Agency but get_current_user
    returns a User, and it is passed as agency_id below — confirm.
    """
    agency_service : AgencyService = container['agencies']
    return agency_service.get_agency(agency_id=current_agency)
# TODO: eventually remove this
@router.get("/users/me/items/")
async def read_own_items(
    current_agency: Annotated[str, Depends(get_current_user)]
):
    """Demo endpoint: return a placeholder item owned by the caller."""
    return [{"item_id": "Foo", "owner": current_agency}]

View file

@ -1,10 +0,0 @@
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def verify_password(plain_password, hashed_password):
    """True when `plain_password` matches the bcrypt hash `hashed_password`."""
    return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password):
    """Return the bcrypt hash of `password` (via passlib's CryptContext)."""
    return pwd_context.hash(password)

View file

@ -1,115 +0,0 @@
import json
import os
from glob import glob
from typing import Dict, List
import logging
from fastapi import HTTPException, status
from amarillo.models.User import User
from amarillo.services.config import config
from amarillo.services.passwords import get_password_hash
logger = logging.getLogger(__name__)
user_conf_directory = 'data/users'
class UserService:
    """Loads and manages user configurations stored as JSON files under
    data/users, keeping a user_id index and a reverse API-key index."""

    def __init__(self):
        # Both Dicts to be kept in sync always. The second api_key_to_agency_id is like a reverse
        # cache for the first for fast lookup of valid api keys, which happens on *every* request.
        self.user_id_to_user_conf: Dict[str, User] = {}
        self.api_key_to_user_id: Dict[str, str] = {}

        for user_conf_file_name in glob(f'{user_conf_directory}/*.json'):
            with open(user_conf_file_name) as user_conf_file:
                dictionary = json.load(user_conf_file)
                user_conf = User(**dictionary)

                user_id = user_conf.user_id
                api_key = user_conf.api_key

                self.user_id_to_user_conf[user_id] = user_conf
                # Users without an API key are not indexed in the reverse map.
                if api_key is not None:
                    self.api_key_to_user_id[api_key] = user_conf.user_id

    def get_user(self, user_id: str) -> User:
        """Return the configuration for `user_id`, or None when unknown."""
        return self.user_id_to_user_conf.get(user_id)

    def check_api_key(self, api_key: str) -> str:
        """Check if the API key is valid

        The agencies' api keys are checked first, and the admin's key.
        The agency_id or "admin" is returned for further checks in the caller if the
        request is permitted, like {agency_id} == agency_id.
        """
        agency_id = self.api_key_to_user_id.get(api_key)
        if agency_id is not None:
            return agency_id

        if api_key == config.admin_token:
            return "admin"

        message = "X-API-Key header invalid"
        logger.error(message)
        raise HTTPException(status_code=400, detail=message)

    def add(self, user_conf: User):
        """Persist a new user configuration; 400 when the user id or the
        API key is already taken."""
        user_id = user_conf.user_id
        api_key = user_conf.api_key

        if self.user_id_to_user_conf.get(user_id) is not None:
            message = f"Agency {user_id} exists already. To update, delete it first."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        agency_using_this_api_key_already = self.api_key_to_user_id.get(api_key)
        a_different_agency_is_using_this_api_key_already = \
            agency_using_this_api_key_already is not None and \
            agency_using_this_api_key_already != user_id
        if a_different_agency_is_using_this_api_key_already:
            message = f"Duplicate API Key for {user_id} not permitted. Use a different key."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        # The password is stored only as a hash.
        user_conf.password = get_password_hash(user_conf.password)

        with open(f'{user_conf_directory}/{user_id}.json', 'w', encoding='utf-8') as f:
            f.write(user_conf.json())

        self.user_id_to_user_conf[user_id] = user_conf
        # Fix: only index the API key when one is configured (mirrors __init__,
        # which never stores a None key).
        if api_key is not None:
            self.api_key_to_user_id[api_key] = user_id
        logger.info(f"Added configuration for user {user_id}.")

    def get_user_ids(self) -> List[str]:
        """Return all configured user ids."""
        return list(self.user_id_to_user_conf.keys())

    def delete(self, user_id):
        """Remove a user's configuration from memory and disk."""
        user_conf = self.user_id_to_user_conf.get(user_id)
        # Fix: users without an API key were never indexed, so an
        # unconditional `del` raised KeyError here.
        if user_conf is not None and user_conf.api_key is not None:
            self.api_key_to_user_id.pop(user_conf.api_key, None)
        del self.user_id_to_user_conf[user_id]
        os.remove(f'{user_conf_directory}/{user_id}.json')
        logger.info(f"Deleted configuration for {user_id}.")

View file

@ -1,5 +0,0 @@
[
{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/stops_zhv.csv", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/parkings_osm.csv", "vicinity": 500}
]

View file

@ -1,28 +0,0 @@
[loggers]
keys=root
[handlers]
keys=consoleHandler, fileHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=INFO
handlers=consoleHandler, fileHandler
propagate=yes
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleFormatter
args=(sys.stdout,)
[handler_fileHandler]
class=handlers.RotatingFileHandler
level=ERROR
formatter=simpleFormatter
args=('error.log', 'a', 1000000, 3) # Filename, mode, maxBytes, backupCount
[formatter_simpleFormatter]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s

View file

@ -1,33 +0,0 @@
.request {
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
font-weight: bold;
border: 1px solid gray;
border-radius: 5px;
padding: 10px;
font-size: 24px;
}
.get {
color: #2b542c;
background-color: #beffbd;
}
.post {
color: #ae5900;
background-color: #ffc79d;
}
.response_formats span {
font-weight: bold;
color: darkred;
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
}
pre {
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
}
ul li {
font-size: 18px;
margin-bottom: 10px;
}

View file

@ -1,186 +0,0 @@
@import url(//fonts.googleapis.com/css?family=Open+Sans:300,400,600,700);
body {
font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;
font-weight: 300;
color: black;
background: white;
padding-left: 20px;
padding-right: 20px;
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;
font-weight: 300;
}
p {
font-weight: 300;
}
.font-normal {
font-weight: 400;
}
.font-semi-bold {
font-weight: 600;
}
.font-bold {
font-weight: 700;
}
.starter-template {
margin-top: 25px;
}
.starter-template .content {
margin-left: 10px;
}
.starter-template .content h1 {
margin-top: 10px;
font-size: 60px;
}
.starter-template .content h1 .smaller {
font-size: 40px;
}
.starter-template .content .lead {
font-size: 25px;
}
.starter-template .links {
float: right;
right: 0;
margin-top: 125px;
}
.starter-template .links ul {
display: block;
padding: 0;
margin: 0;
}
.starter-template .links ul li {
list-style: none;
display: inline;
margin: 0 10px;
}
.starter-template .links ul li:first-child {
margin-left: 0;
}
.starter-template .links ul li:last-child {
margin-right: 0;
}
.starter-template .links ul li.current-version {
font-weight: 400;
}
.starter-template .links ul li a, a {
text-decoration: underline;
}
.starter-template .links ul li a:hover, a:hover {
text-decoration: underline;
}
.starter-template .links ul li .icon-muted {
margin-right: 5px;
}
.starter-template .copyright {
margin-top: 10px;
font-size: 0.9em;
text-transform: lowercase;
float: right;
right: 0;
}
@media (max-width: 1199px) {
.starter-template .content h1 {
font-size: 45px;
}
.starter-template .content h1 .smaller {
font-size: 30px;
}
.starter-template .content .lead {
font-size: 20px;
}
}
@media (max-width: 991px) {
.starter-template {
margin-top: 0;
}
.starter-template .logo {
margin: 40px auto;
}
.starter-template .content {
margin-left: 0;
text-align: center;
}
.starter-template .content h1 {
margin-bottom: 20px;
}
.starter-template .links {
float: none;
text-align: center;
margin-top: 60px;
}
.starter-template .copyright {
float: none;
text-align: center;
}
}
@media (max-width: 767px) {
.starter-template .content h1 .smaller {
font-size: 25px;
display: block;
}
.starter-template .content .lead {
font-size: 16px;
}
.starter-template .links {
margin-top: 40px;
}
.starter-template .links ul li {
display: block;
margin: 0;
}
.starter-template .links ul li .icon-muted {
display: none;
}
.starter-template .copyright {
margin-top: 20px;
}
}
.disclaimer {
margin-top: 20px;
font-style: italic;
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

View file

@ -1,34 +0,0 @@
{% extends "shared/layout.html" %}
{% block content %}
<h1>
Amarillo - Der Carpooling-Intermediär
</h1>
<h2>Hintergrund</h2>
<p>
Ein "Amarillo" ist ein <a href="https://www.cubatravelnetwork.com/de/autoverleih-in-kuba/autofahren-auf-kuba">gelb gekleideter Verkehrsregler</a>, der auf Kuba Trampern eine möglichst passende Mitfahrgelegenheit vermittelt.
</p>
<p>
Ebenso wie ein kubanischer Amarillo hilft auch dieser Amarillo-Dienst, passende Mitfahrgelegenheiten zu vermitteln. Mitfahrplattformen können Mitfahrangebote ihrer
Nutzerinnen über die Amarillo-REST-API publizieren und so Reiseplanern oder Meta-Suchmaschinen zur Verfügung stellen.
</p>
<p>
Amarillo reichert diese Mitfahrangebote um die mutmaßliche Fahrtroute sowie entlang
dieser liegenden Zustiegs- und Ausstiegspunkte an, so dass Reiseplaner für Mitfahrende gut erreichbare Treffpunkte vorschlagen können, die mit geringen Umwegen angefahren werden können, wie z.B. Parken-und-Mitfahren-Parkplätze, Bahnhöfe oder Bushaltestellen.
</p>
<p>
Für die einfache Integration in Reiseplaner stellt Amarillo die Mitfahrangebote aller
publizierenden Mitfahrbörsen nächtlich als regionale GTFS-Feeds bzw. minütlich als GTFS-RT Feeds bereit. Der GTFS-RT-Feed enthält alle seit dem Vortag um Mitternacht hinzugefügten, aktualisierten oder gelöschten Mitfahrangebote als ADDED bzw. CANCELED TripUpdates. Die amarillo-spezifischen Erweiterungen werden in dieser <a href="https://github.com/mfdz/amarillo/blob/main/app/proto/realtime_extension.proto">Prototype-Extension</a> definiert.
</p>
<p>
Die DigitalAgentur Brandenburg GmbH als Betreiber dieses Amarillo-Dienstes für die kommunale Mobilitätsplattform <a href="https://bbnavi.de">bbnavi.de</a> und das Mitfahrangebot <a href="https://mitfahrenbb.de">mitfahrenbb.de</a> ist offen für die Aufnahme weiterer Mitfahrplattformen sowohl für die Integration von Mitfahrangeboten über Amarillo als auch für das Aufgeben von Angeboten über unsere Seiten. Möchte die Mitfahrplattform minutenaktuelle Mitfahrangebote bereitstellen, sollte sie die carpool-POST/PUT/DELETE-Endpunkte für jede Aktualisierung eines Inserats aufrufen. Bei Interesse an einer Integration bzw. um den hierfür notwendigen API-Key zu erhalten, nehmen Sie gerne über bbnavi at digital-agentur punkt de Kontakt mit uns auf.
</p>
<h3>Dokumentation</h3>
<p>
Die Dokumentation dieses Dienstes ist als <a href="/docs">OpenAPI-Dokumentation</a> verfügbar.
Der Quellcode ist unter AGPL-Lizenz in diesem <a href="https://github.com/mfdz/amarillo">Github-Repository</a> einseh- und abrufbar.
</p>
{% endblock %}

View file

@ -1,96 +0,0 @@
<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Amarillo</title>
<style type="text/css">
body {
margin: 1rem;
font-size: 100%;
font-family: sans-serif;
line-height: 1.35;
}
header {
margin-bottom: 2rem;
}
footer {
margin-top: 5rem;
}
a {
text-decoration: none;
}
a:hover, a:focus, a:active {
text-decoration: underline;
}
a, a:visited {
color: #da1b1b;
}
h1, h2, h3 {
font-weight: 500;
margin-bottom: .8em;
}
header {
padding-bottom: 1em;
border-bottom: 1px solid #ddd;
}
#bbnavi-logo {
display: inline-block;
text-decoration: none;
font-size: 2rem;
}
#bbnavi-logo svg {
display: inline-block;
width: 3rem;
vertical-align: bottom;
}
#barshare-logo {
max-height: 3rem;
vertical-align: text-bottom;
position: relative;
top: -.3rem;
}
footer {
padding-top: 1em;
border-top: 1px solid #ddd;
}
footer > :first-child {
margin-top: 0;
}
footer ul {
list-style: none;
padding-left: 0;
}
</style>
</head>
<body>
<header>
<a id="bbnavi-logo" href="https://bbnavi.de/">
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 75 75">
<defs>
<style>
.cls-1{fill: #da1b1b;}.cls-2{fill: #fff;}
</style>
</defs>
<rect class="cls-1" x="5" y="5" width="65" height="65" rx="10"/>
<path class="cls-2" d="M41,48.91l7.17-20.44,1.05-3a12.77,12.77,0,0,0,.25-2.5,12.15,12.15,0,1,0-24.29,0,12.35,12.35,0,0,0,.32,2.81l.84,2.38,7.28,20.75C19,49.42,7.81,52.62,7.81,56.51c0,4.22,13.29,7.65,29.69,7.65s29.69-3.43,29.69-7.65C67.19,52.58,55.72,49.35,41,48.91ZM37.05,61c-12.92,0-23.39-2.12-23.39-4.73,0-2.44,9.12-4.44,20.84-4.7L36.41,57a.9.9,0,0,0,1.69,0L40,51.59c11.53.3,20.44,2.28,20.44,4.69C60.44,58.89,50,61,37.05,61Z"/>
</svg>
Amarillo
</a>
</header>
{% block content %}
<div class="content">
THIS PAGE HAS NO CONTENT
</div>
{% endblock %}
<footer>
<ul>
<li><a href="https://bbnavi.de/impressum/">Impressum</a></li>
<li><a href="https://bbnavi.de/datenschutzerklaerung/">Datenschutz</a></li>
</ul>
</footer>
</body>
</html>

View file

@ -1,38 +0,0 @@
from fastapi.testclient import TestClient
from amarillo.main import app
from amarillo.tests.sampledata import carpool_1234, data1
client = TestClient(app)
# TODO FG: This test needs a clean temporary storage folder, not the hard coded data dir.
def test_doc():
    """The OpenAPI schema endpoint must be reachable."""
    assert client.get("/openapi.json").status_code == 200
def test_get_mfdz_0():
    """Fetching an unknown carpool id yields 404 with an agency-specific detail."""
    expected = {"detail": "Carpool with agency mfdz and id 0 does not exist."}
    response = client.get("/carpool/mfdz/0")
    assert response.status_code == 404
    assert response.json() == expected
def test_delete_mfdz_0():
    """Deleting an unknown carpool id yields 404 with a detail message."""
    expected = {"detail": "Carpool with id 0 does not exist."}
    response = client.delete("/carpool/mfdz/0")
    assert response.status_code == 404
    assert response.json() == expected
def test_post():
    """A carpool can be created once, then fetched, and deleted exactly once."""
    carpool_url = f"/carpool/mfdz/{data1['id']}"
    assert client.get(carpool_url).status_code == 404, "The carpool should not exist yet"
    assert client.post("/carpool/", json=data1).status_code == 200, "The first post must work with 200"
    assert client.get(carpool_url).status_code == 200, "After post, the get must work"
    assert client.delete(carpool_url).status_code == 200, "The first delete must work with 200"
    assert client.delete(carpool_url).status_code == 404, "The second delete must fail"

View file

@ -1,37 +0,0 @@
from fastapi import HTTPException
import pytest
from amarillo.services.oauth2 import verify_permission
from amarillo.models.User import User
test_user = User(user_id="test", password="testpassword", permissions=["all:read", "mfdz:write", "ride2go:all", "gtfs"])
admin_user = User(user_id="admin", password="testpassword", permissions=["admin"])
def test_operation():
    """A plain operation permission is granted only when explicitly listed."""
    # "gtfs" appears in test_user's permissions, so this must pass silently.
    verify_permission("gtfs", test_user)
    # "geojson" is not granted, so verification must be rejected.
    with pytest.raises(HTTPException):
        verify_permission("geojson", test_user)
def test_agency_permission():
    """Agency-scoped permissions: all:read grants read everywhere, write only
    where explicitly granted."""
    verify_permission("mvv:read", test_user)
    verify_permission("mfdz:read", test_user)
    verify_permission("mfdz:write", test_user)
    verify_permission("ride2go:write", test_user)
    # One raises-block per denial: statements after the first raise inside a
    # single `with pytest.raises(...)` are never executed, so the second
    # check was silently skipped in the original.
    with pytest.raises(HTTPException):
        verify_permission("mvv:write", test_user)
    with pytest.raises(HTTPException):
        verify_permission("mvv:all", test_user)
def test_admin():
    """The admin permission implies every other permission; test_user has none
    of the admin-only grants."""
    verify_permission("admin", admin_user)
    verify_permission("gtfs", admin_user)
    verify_permission("all:all", admin_user)
    verify_permission("mvv:all", admin_user)
    verify_permission("mfdz:read", admin_user)
    verify_permission("mfdz:write", admin_user)
    verify_permission("ride2go:write", admin_user)
    # One raises-block per denial: in the original, the second call was
    # unreachable once the first one raised inside the shared block.
    with pytest.raises(HTTPException):
        verify_permission("admin", test_user)
    with pytest.raises(HTTPException):
        verify_permission("all:all", test_user)

View file

@ -1,74 +0,0 @@
import os
import re
import shutil
from pathlib import Path
import logging
from datetime import datetime, date, timedelta
from pyproj import Geod
logger = logging.getLogger(__name__)
#logging.conf may not exist yet, so we need to configure the logger to show infos
logging.basicConfig(level=logging.INFO)
def assert_folder_exists(foldername):
    """Create `foldername` (including missing parents) if it does not exist.

    Uses exist_ok=True so that concurrent callers cannot race between the
    existence check and the creation, which the previous isdir-then-makedirs
    sequence allowed.
    """
    os.makedirs(foldername, exist_ok=True)
def agency_carpool_ids_from_filename(carpool_filename):
    """Extract (agency_id, carpool_id) from a carpool file path.

    The path must end in .../<agency_id>/<carpool_id>.json, where both ids
    consist of letters, digits, '_' or '-'.  Returns (None, None) when the
    path does not match that shape.
    """
    match = re.search(r'\/([a-zA-Z0-9_-]+)\/([a-zA-Z0-9_-]+)\.json$', carpool_filename)
    if not match:
        return None, None
    return match.group(1), match.group(2)
def is_older_than_days(date_to_check, number_of_days):
    """True when `date_to_check` lies strictly before today minus `number_of_days`.

    A missing value (None) is treated as "older".  Datetime values are
    compared by their date component only.
    """
    if date_to_check is None:
        return True
    effective = date_to_check.date() if isinstance(date_to_check, datetime) else date_to_check
    return effective < date_days_ago(number_of_days)
def yesterday():
    """Yesterday's calendar date (today minus one day)."""
    return date.today() - timedelta(days=1)
def date_days_ago(number_of_days):
    """The calendar date `number_of_days` days before today."""
    offset = timedelta(days=number_of_days)
    return date.today() - offset
def geodesic_distance_in_m(coord1, coord2):
    """Geodesic distance in meters between two coordinates on the WGS84
    ellipsoid.

    Each coordinate is indexed as (lon, lat): index 0 feeds the longitude
    list, index 1 the latitude list.
    """
    geod = Geod(ellps="WGS84")
    longitudes = [coord1[0], coord2[0]]
    latitudes = [coord1[1], coord2[1]]
    # line_lengths returns one length per segment; there is exactly one here.
    return geod.line_lengths(longitudes, latitudes)[0]
def copy_static_files(files_and_dirs_to_copy):
    """Copy the named items from the package's static/ folder into the
    current working directory, skipping anything that already exists there.

    Raises:
        FileNotFoundError: when an item is missing from the source folder.
    """
    amarillo_dir = Path(__file__).parents[1]
    source_dir = os.path.join(amarillo_dir, "static")
    destination_dir = os.getcwd()
    for item in files_and_dirs_to_copy:
        source_path = os.path.join(source_dir, item)
        destination_path = os.path.join(destination_dir, item)
        if not os.path.exists(source_path):
            raise FileNotFoundError(source_path)
        if os.path.exists(destination_path):
            # Never overwrite files the operator may already have customized.
            continue
        if os.path.isfile(source_path):
            shutil.copy2(source_path, destination_path)
            logger.info(f"Copied {item} to {destination_path}")
        if os.path.isdir(source_path):
            shutil.copytree(source_path, destination_path)
            logger.info(f"Copied directory {item} and its contents to {destination_path}")

View file

@ -1,17 +0,0 @@
import fastapi
from starlette.requests import Request
from starlette.templating import Jinja2Templates
templates = Jinja2Templates('templates')
router = fastapi.APIRouter()
@router.get('/', include_in_schema=False)
async def index(request: Request):
    """Render the landing page template."""
    context = {"request": request}
    return templates.TemplateResponse('home/index.html', context)
@router.get('/favicon.ico', include_in_schema=False)
def favicon():
    """Redirect the browser's favicon request to the bundled static asset."""
    favicon_url = '/static/img/favicon.ico'
    return fastapi.responses.RedirectResponse(url=favicon_url)

View file

@ -1,5 +1,3 @@
# Bounding-Box Germany # Bounding-Box Germany
ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }' ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }'
env = 'PROD' env = 'PROD'
graphhopper_base_url = 'https://api.mfdz.de/gh'
stop_sources_file = 'conf/stop_sources.json'

22
logging.conf Normal file
View file

@ -0,0 +1,22 @@
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=INFO
handlers=consoleHandler
propagate=yes
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleFormatter
args=(sys.stdout,)
[formatter_simpleFormatter]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s

View file

@ -1,25 +1,20 @@
[project] [project]
name = "amarillo" name = "amarillo-core"
version = "0.0.15a4" version = "0.0.7"
description = "Aggregates and enhances carpooling-offers and publishes them as GTFS(-RT)"
readme = "README.md"
license = {file = "LICENSE"}
keywords = ["amarillo", "ridesharing", "carpooling", "gtfs", "gtfs-rt"]
dependencies = [ dependencies = [
"fastapi[all]==0.109.0", "fastapi[all]==0.104.0",
"geopandas==0.14", "geopandas==0.14",
"uvicorn[standard]==0.23.2", "uvicorn[standard]==0.23.2",
"pydantic[dotenv]==2.4.2", "pydantic[dotenv]==2.4.2",
"protobuf==3.20.3", "protobuf==3.20.3",
"starlette~=0.35", "starlette",
"requests==2.31.0", "requests==2.31.0",
"pyproj==3.6.1", "pyproj==3.6.1",
"geojson-pydantic==1.0.1", "geojson-pydantic==1.0.1",
"watchdog==3.0.0",
"python-jose[cryptography]",
"bcrypt==4.0.1",
"passlib[bcrypt]"
] ]
[tool.setuptools.packages] [tool.setuptools.packages.find]
find = {} where = ["."]
include = ["amarillo"]
exclude = ["data"]
namespaces = true

View file

@ -1,13 +1,9 @@
fastapi[all]==0.109.0 fastapi[all]==0.104.0
geopandas==0.14 geopandas==0.14
uvicorn[standard]==0.23.2 uvicorn[standard]==0.23.2
pydantic[dotenv]==2.4.2 pydantic[dotenv]==2.4.2
protobuf==3.20.3 protobuf==3.20.3
starlette~=0.35 starlette
requests==2.31.0 requests==2.31.0
pyproj==3.6.1 pyproj==3.6.1
geojson-pydantic==1.0.1 geojson-pydantic==1.0.1
pytest
python-jose[cryptography]
bcrypt==4.0.1
passlib[bcrypt]