Compare commits
43 commits
docker-bui
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
797eff5b2a | ||
|
|
b9b47dfc2a | ||
|
|
a04397f59d | ||
|
|
c8acc46382 | ||
|
|
11d5849290 | ||
|
|
66cc746937 | ||
|
|
6af3250bea | ||
|
|
6f019020ea | ||
|
|
3ed38a959d | ||
|
|
b1aeac37df | ||
|
|
206f93ddde | ||
|
|
acd06b522a | ||
|
|
abf5b071a4 | ||
|
|
e81dbbc39c | ||
|
|
bbba8de7ac | ||
|
|
c03d5b1232 | ||
|
|
319be9f803 | ||
|
|
c521fe5a9a | ||
|
|
85aaa1e6b1 | ||
|
|
5aee8b9ecf | ||
|
|
08e0c545a1 | ||
|
|
a033ac86c4 | ||
|
|
1d3150918b | ||
|
|
c6aa3e7b8d | ||
|
|
3de2621b90 | ||
|
|
ddca809530 | ||
|
|
2e453a71f3 | ||
|
|
1fa65f97b4 | ||
|
|
20ca275a48 | ||
|
|
25ac460836 | ||
|
|
ebbbf28432 | ||
|
|
4d909ee62c | ||
|
|
38e33e461c | ||
|
|
89da3ddb88 | ||
|
|
8b9fd6ab25 | ||
|
|
dc9265e6fb | ||
|
|
7d7cbacf00 | ||
|
|
e638300ba5 | ||
|
|
18c015a3b6 | ||
|
|
49459b8f70 | ||
|
|
37b7075e30 | ||
|
|
0c31ec731b | ||
|
|
2b37f68228 |
54
.gitignore
vendored
54
.gitignore
vendored
|
|
@ -1,4 +1,3 @@
|
|||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
|
|
@ -21,6 +20,7 @@ parts/
|
|||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
pip-wheel-metadata/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
|
|
@ -50,7 +50,6 @@ coverage.xml
|
|||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
|
|
@ -73,7 +72,6 @@ instance/
|
|||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
|
|
@ -84,9 +82,7 @@ profile_default/
|
|||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
.python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
|
|
@ -95,22 +91,7 @@ ipython_config.py
|
|||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
|
|
@ -147,20 +128,11 @@ dmypy.json
|
|||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
setup.ipynb
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
.vscode
|
||||
*~
|
||||
/.idea/
|
||||
/.vscode/
|
||||
secrets
|
||||
gtfs/*.zip
|
||||
gtfs/*.pbf
|
||||
|
|
@ -170,12 +142,14 @@ data/enhanced/
|
|||
data/failed/
|
||||
data/trash/
|
||||
data/gtfs/
|
||||
data/tmp/
|
||||
data/agencyconf
|
||||
|
||||
data/grfs
|
||||
data/tmp
|
||||
data/users/**
|
||||
data/**
|
||||
|
||||
#these files are under app/static but they get copied to the outside directory on startup
|
||||
logging.conf
|
||||
config
|
||||
static/*
|
||||
templates/*
|
||||
static/**
|
||||
templates/**
|
||||
conf/**
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ RUN \
|
|||
|
||||
ENV ADMIN_TOKEN=''
|
||||
ENV RIDE2GO_TOKEN=''
|
||||
ENV SECRET_KEY=''
|
||||
ENV METRICS_USER=''
|
||||
ENV METRICS_PASSWORD=''
|
||||
|
||||
|
|
@ -30,13 +31,13 @@ RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
|
|||
RUN --mount=type=secret,id=AMARILLO_REGISTRY_CREDENTIALS \
|
||||
pip install --no-cache-dir --upgrade --extra-index-url https://$(cat /run/secrets/AMARILLO_REGISTRY_CREDENTIALS)@${PACKAGE_REGISTRY_URL} ${PLUGINS}
|
||||
|
||||
COPY ./amarillo/app /app/amarillo/app
|
||||
COPY ./amarillo /app/amarillo
|
||||
COPY ./amarillo/plugins /app/amarillo/plugins
|
||||
COPY ./amarillo/static/static /app/static
|
||||
COPY ./amarillo/static/templates /app/templates
|
||||
COPY ./amarillo/static/config /app
|
||||
COPY ./amarillo/static/logging.conf /app
|
||||
COPY ./conf /app/conf
|
||||
COPY ./amarillo/static/conf /app/conf
|
||||
|
||||
# This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead
|
||||
# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
# CMD ["uvicorn", "amarillo.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
|
|
|||
63
Jenkinsfile
vendored
63
Jenkinsfile
vendored
|
|
@ -7,48 +7,57 @@ pipeline {
|
|||
DOCKER_REGISTRY_URL = 'https://git.gerhardt.io'
|
||||
OWNER = 'amarillo'
|
||||
IMAGE_NAME = 'amarillo'
|
||||
AMARILLO_DISTRIBUTION = '0.1'
|
||||
AMARILLO_DISTRIBUTION = '0.2'
|
||||
TAG = "${AMARILLO_DISTRIBUTION}.${BUILD_NUMBER}"
|
||||
PLUGINS = 'amarillo-metrics amarillo-enhancer'
|
||||
PLUGINS = 'amarillo-metrics amarillo-enhancer amarillo-grfs-exporter'
|
||||
DEPLOY_WEBHOOK_URL = 'http://amarillo.mfdz.de:8888/mitanand'
|
||||
DEPLOY_SECRET = credentials('AMARILLO-JENKINS-DEPLOY-SECRET')
|
||||
}
|
||||
stages {
|
||||
stage('Echo environment variables'){
|
||||
stage('Create virtual environment') {
|
||||
steps {
|
||||
echo "BRANCH_NAME: ${BRANCH_NAME}"
|
||||
echo "JOB_NAME: ${JOB_NAME}"
|
||||
echo "BRANCH_IS_PRIMARY ${BRANCH_IS_PRIMARY}"
|
||||
echo "TAG_NAME: ${TAG_NAME}"
|
||||
echo 'Creating virtual environment'
|
||||
sh '''python3 -m venv .venv
|
||||
. .venv/bin/activate'''
|
||||
|
||||
}
|
||||
}
|
||||
stage('Installing requirements') {
|
||||
steps {
|
||||
echo 'Installing packages'
|
||||
sh 'python3 -m pip install -r requirements.txt'
|
||||
sh 'python3 -m pip install --upgrade build'
|
||||
sh 'python3 -m pip install --upgrade twine'
|
||||
}
|
||||
}
|
||||
stage('Build package') {
|
||||
steps {
|
||||
echo 'Building package'
|
||||
sh 'python3 -m build'
|
||||
}
|
||||
}
|
||||
stage('Publish package') {
|
||||
steps {
|
||||
sh 'python3 -m twine upload --skip-existing --verbose --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*'
|
||||
}
|
||||
}
|
||||
stage('Build docker image') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
echo 'Building image'
|
||||
script {
|
||||
docker.build("${OWNER}/${IMAGE_NAME}:${TAG}",
|
||||
//--no-cache to make sure plugins are updated
|
||||
"-t ${OWNER}/${IMAGE_NAME}:latest --no-cache --build-arg='PACKAGE_REGISTRY_URL=${PLUGINS_REPO_URL}' --build-arg='PLUGINS=${PLUGINS}' --secret id=AMARILLO_REGISTRY_CREDENTIALS,env=GITEA_CREDS .")
|
||||
"--no-cache --build-arg='PACKAGE_REGISTRY_URL=${PLUGINS_REPO_URL}' --build-arg='PLUGINS=${PLUGINS}' --secret id=AMARILLO_REGISTRY_CREDENTIALS,env=GITEA_CREDS .")
|
||||
}
|
||||
}
|
||||
}
|
||||
// stage('Run tests on image'){
|
||||
// steps{
|
||||
// script {
|
||||
// docker.image("${OWNER}/${IMAGE_NAME}:${TAG}").inside(
|
||||
// "--name amarillo -p 8000:80 -e MODULE_NAME=amarillo.app.main -e MAX_WORKERS=1 -e ADMIN_TOKEN=test -e RIDE2GO_TOKEN=test -e METRICS_USER=test -e METRICS_PASSWORD=test -e TZ=Europe/Berlin -v ${pwd()}/data:/app/data"
|
||||
// ){
|
||||
// // TODO: wait until the API is up
|
||||
// c -> sh script: """
|
||||
// sleep 15
|
||||
// echo Testing...
|
||||
// python -m pytest
|
||||
// """
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
stage('Push image to container registry') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
echo 'Pushing image to registry'
|
||||
script {
|
||||
|
|
@ -61,15 +70,15 @@ pipeline {
|
|||
}
|
||||
}
|
||||
stage('Notify CD script') {
|
||||
when {
|
||||
branch 'main'
|
||||
}
|
||||
steps {
|
||||
echo 'Triggering deploy webhook'
|
||||
script {
|
||||
def response = httpRequest contentType: 'APPLICATION_JSON',
|
||||
httpMode: 'POST', requestBody: '{}', authentication: 'AMARILLO-JENKINS-DEPLOY-SECRET',
|
||||
url: "${DEPLOY_WEBHOOK_URL}"
|
||||
|
||||
println("Status: ${response.status}")
|
||||
println("Response: ${response.content}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
recursive-include amarillo/static/ *
|
||||
recursive-include amarillo/app/tests/ *
|
||||
recursive-include amarillo/tests/ *
|
||||
26
README.md
26
README.md
|
|
@ -9,9 +9,11 @@ An Amarillo is a [yellow-dressed person](https://www.dreamstime.com/sancti-spiri
|
|||
- Python 3.9.2 with pip
|
||||
- python3-venv
|
||||
|
||||
Create a virtual environment `python3 -m venv venv`. Activate the environment with `source venv/bin/activate` and install the dependencies `pip install -r requirements.txt`.
|
||||
Create a virtual environment `python3 -m venv venv`.
|
||||
|
||||
Run `uvicorn amarillo.app.main:app`.
|
||||
Activate the environment and install the dependencies `pip install -r requirements.txt`.
|
||||
|
||||
Run `uvicorn amarillo.main:app`.
|
||||
|
||||
In development, you can use `--reload`.
|
||||
|
||||
|
|
@ -20,8 +22,6 @@ In development, you can use `--reload`.
|
|||
- `env`
|
||||
- `ADMIN_TOKEN`
|
||||
|
||||
E.g. set the environment variable like this: `export ADMIN_TOKEN=YOUR_SECRET_TOKEN_HERE`.
|
||||
|
||||
## Security
|
||||
|
||||
All endpoints are protected by an API-Key in the HTTP header.
|
||||
|
|
@ -29,14 +29,14 @@ There is a special *admin* user.
|
|||
For this user, the API-Key must be passed in as an environment variable when
|
||||
Amarillo is started.
|
||||
|
||||
The admin can create additional API-Keys in the `/agencyconf` endpoint. This
|
||||
The admin can create additional API-Keys in the `/users` endpoint. This
|
||||
endpoint is always available but not always shown in `/docs`, especially not
|
||||
when running in production.
|
||||
The Swagger docs for `/agencyconf` can be seen on the MFDZ demo server.
|
||||
The Swagger docs for `/users` can be seen on the MFDZ demo server.
|
||||
|
||||
Permissions work this way
|
||||
- the admin is allowed to call all operations on all resources. Only the admin
|
||||
can create new API-Keys by POSTing an `AgencyConf` JSON object to `/agencyconf`.
|
||||
can create new API-Keys by POSTing an `users` JSON object to `/users`.
|
||||
- API-Keys for agencies are allowed to POST/PUT/GET/DELETE their own
|
||||
resources and GET some public resources.
|
||||
|
||||
|
|
@ -44,23 +44,23 @@ Permissions work this way
|
|||
|
||||
### GTFS-RT python bindings
|
||||
|
||||
In case you modify or update the proto-files in app/proto, you'll need to regenerate the python bindings. First, create the python files:
|
||||
In case you modify or update the proto-files in amarillo/proto, you'll need to regenerate the python bindings. First, create the python files:
|
||||
|
||||
```sh
|
||||
$ cd app/proto
|
||||
$ cd amarillo/proto
|
||||
$ protoc --version
|
||||
libprotoc 3.21.6
|
||||
$ protoc --proto_path=. --python_out=../services/gtfsrt gtfs-realtime.proto realtime_extension.proto
|
||||
$ sed 's/import gtfs_realtime_pb2/import app.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
|
||||
$ sed 's/import gtfs_realtime_pb2/import amarillo.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
In the top directory, run `pytest app/tests`.
|
||||
In the top directory, run `pytest amarillo/tests`.
|
||||
|
||||
## Docker
|
||||
|
||||
Based on [tiangolo/uvicorn-gunicorn:python3.9-slim](https://github.com/tiangolo/uvicorn-gunicorn-docker)
|
||||
|
||||
- build `docker build -t amarillo .`
|
||||
- run `docker run --rm --name amarillo -p 8000:80 -e MODULE_NAME=amarillo.app.main -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e METRICS_USER=$METRICS_USER -e METRICS_PASSWORD=$METRICS_PASSWORD -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`
|
||||
- build `docker build -t amarillo --build-arg="PLUGINS=amarillo-metrics" .`
|
||||
- run `docker run --rm --name amarillo -p 8000:80 -e MAX_WORKERS="1" -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`
|
||||
|
|
|
|||
1
amarillo/__init__.py
Normal file
1
amarillo/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
from pydantic import ConfigDict, BaseModel, Field
|
||||
|
||||
|
||||
class AgencyConf(BaseModel):
|
||||
agency_id: str = Field(
|
||||
description="ID of the agency that uses this token.",
|
||||
min_length=1,
|
||||
max_length=20,
|
||||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["mfdz"])
|
||||
|
||||
api_key: str = Field(
|
||||
description="The agency's API key for using the API",
|
||||
min_length=20,
|
||||
max_length=256,
|
||||
pattern=r'^[a-zA-Z0-9]+$',
|
||||
examples=["d8yLuY4DqMEUCLcfJASi"])
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"title": "Agency Configuration",
|
||||
"description": "Configuration for an agency.",
|
||||
"example":
|
||||
{
|
||||
"agency_id": "mfdz",
|
||||
"api_key": "d8yLuY4DqMEUCLcfJASi"
|
||||
}
|
||||
})
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
from collections import namedtuple
|
||||
from datetime import timedelta
|
||||
|
||||
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
|
||||
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
|
||||
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name')
|
||||
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
|
||||
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
|
||||
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed')
|
||||
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
|
||||
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
|
||||
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
|
||||
|
||||
# TODO Move to utils
|
||||
class GtfsTimeDelta(timedelta):
|
||||
def __str__(self):
|
||||
seconds = self.total_seconds()
|
||||
hours = seconds // 3600
|
||||
minutes = (seconds % 3600) // 60
|
||||
seconds = seconds % 60
|
||||
str = '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
|
||||
return (str)
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, timedelta):
|
||||
return self.__class__(self.days + other.days,
|
||||
self.seconds + other.seconds,
|
||||
self.microseconds + other.microseconds)
|
||||
return NotImplemented
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
import logging
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Header, Depends
|
||||
|
||||
from amarillo.app.models.AgencyConf import AgencyConf
|
||||
from amarillo.app.services.agencyconf import AgencyConfService
|
||||
from amarillo.app.services.config import config
|
||||
from amarillo.app.utils.container import container
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/agencyconf",
|
||||
tags=["agencyconf"]
|
||||
)
|
||||
|
||||
# This endpoint is not shown in PROD installations, only in development
|
||||
# TODO make this an explicit config option
|
||||
include_in_schema = config.env != 'PROD'
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
|
||||
# X_API_Key is upper case for OpenAPI
|
||||
async def verify_admin_api_key(X_API_Key: str = Header(...)):
|
||||
if X_API_Key != config.admin_token:
|
||||
message="X-API-Key header invalid"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
return "admin"
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
|
||||
# X_API_Key is upper case for OpenAPI
|
||||
async def verify_api_key(X_API_Key: str = Header(...)):
|
||||
agency_conf_service: AgencyConfService = container['agencyconf']
|
||||
|
||||
return agency_conf_service.check_api_key(X_API_Key)
|
||||
|
||||
# TODO Return code 403 Unauthoized (in response_status_codes as well...)
|
||||
async def verify_permission_for_same_agency_or_admin(agency_id_in_path_or_body, agency_id_from_api_key):
|
||||
"""Verifies that an agency is accessing something it owns or the user is admin
|
||||
|
||||
The agency_id is part of some paths, or when not in the path it is in the body, e.g. in PUT /carpool.
|
||||
|
||||
This function encapsulates the formula 'working with own stuff, or admin'.
|
||||
"""
|
||||
is_permitted = agency_id_in_path_or_body == agency_id_from_api_key or agency_id_from_api_key == "admin"
|
||||
|
||||
if not is_permitted:
|
||||
message = f"Working with {agency_id_in_path_or_body} resources is not permitted for {agency_id_from_api_key}."
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
|
||||
@router.get("/",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="getAgencyIdsWhichHaveAConfiguration",
|
||||
summary="Get agency_ids which have a configuration",
|
||||
response_model=List[str],
|
||||
description="Returns the agency_ids but not the details.",
|
||||
status_code=status.HTTP_200_OK)
|
||||
async def get_agency_ids(admin_api_key: str = Depends(verify_api_key)) -> [str]:
|
||||
return container['agencyconf'].get_agency_ids()
|
||||
|
||||
|
||||
@router.post("/",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="postNewAgencyConf",
|
||||
summary="Post a new AgencyConf")
|
||||
async def post_agency_conf(agency_conf: AgencyConf, admin_api_key: str = Depends(verify_admin_api_key)):
|
||||
agency_conf_service: AgencyConfService = container['agencyconf']
|
||||
agency_conf_service.add(agency_conf)
|
||||
|
||||
# TODO 400->403
|
||||
@router.delete("/{agency_id}",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="deleteAgencyConf",
|
||||
status_code=status.HTTP_200_OK,
|
||||
summary="Delete configuration of an agency. Returns true if the token for the agency existed, "
|
||||
"false if it didn't exist."
|
||||
)
|
||||
async def delete_agency_conf(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)):
|
||||
agency_may_delete_own = requesting_agency_id == agency_id
|
||||
admin_may_delete_everything = requesting_agency_id == "admin"
|
||||
is_permitted = agency_may_delete_own or admin_may_delete_everything
|
||||
|
||||
if not is_permitted:
|
||||
message = f"The API key for {requesting_agency_id} can not delete the configuration for {agency_id}"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
agency_conf_service: AgencyConfService = container['agencyconf']
|
||||
|
||||
agency_exists = agency_id in agency_conf_service.get_agency_ids()
|
||||
|
||||
if not agency_exists:
|
||||
message = f"No config for {agency_id}"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
agency_conf_service.delete(agency_id)
|
||||
|
|
@ -1,88 +0,0 @@
|
|||
import logging
|
||||
import time
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Depends
|
||||
|
||||
from amarillo.app.models.Carpool import Region
|
||||
from amarillo.app.routers.agencyconf import verify_admin_api_key
|
||||
from amarillo.app.services.regions import RegionService
|
||||
from amarillo.app.utils.container import container
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/region",
|
||||
tags=["region"]
|
||||
)
|
||||
|
||||
|
||||
@router.get("/",
|
||||
operation_id="getRegions",
|
||||
summary="Return all regions",
|
||||
response_model=List[Region],
|
||||
responses={
|
||||
},
|
||||
)
|
||||
async def get_regions() -> List[Region]:
|
||||
service: RegionService = container['regions']
|
||||
return list(service.regions.values())
|
||||
|
||||
@router.get("/{region_id}",
|
||||
operation_id="getRegionById",
|
||||
summary="Find region by ID",
|
||||
response_model=Region,
|
||||
description="Find region by ID",
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
},
|
||||
)
|
||||
async def get_region(region_id: str) -> Region:
|
||||
region = _assert_region_exists(region_id)
|
||||
logger.info(f"Get region {region_id}.")
|
||||
|
||||
return region
|
||||
|
||||
def _assert_region_exists(region_id: str) -> Region:
|
||||
regions: RegionService = container['regions']
|
||||
region = regions.get_region(region_id)
|
||||
region_exists = region is not None
|
||||
|
||||
if not region_exists:
|
||||
message = f"Region with id {region_id} does not exist."
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
|
||||
|
||||
return region
|
||||
|
||||
@router.get("/{region_id}/gtfs",
|
||||
summary="Return GTFS Feed for this region",
|
||||
response_description="GTFS-Feed (zip-file)",
|
||||
response_class=FileResponse,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
}
|
||||
)
|
||||
async def get_file(region_id: str, user: str = Depends(verify_admin_api_key)):
|
||||
_assert_region_exists(region_id)
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
|
||||
|
||||
@router.get("/{region_id}/gtfs-rt",
|
||||
summary="Return GTFS-RT Feed for this region",
|
||||
response_description="GTFS-RT-Feed",
|
||||
response_class=FileResponse,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
|
||||
}
|
||||
)
|
||||
async def get_file(region_id: str, format: str = 'protobuf', user: str = Depends(verify_admin_api_key)):
|
||||
_assert_region_exists(region_id)
|
||||
if format == 'json':
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
|
||||
elif format == 'protobuf':
|
||||
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
|
||||
else:
|
||||
message = "Specified format is not supported, i.e. neither protobuf nor json."
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
|
@ -1,111 +0,0 @@
|
|||
import json
|
||||
import os
|
||||
from glob import glob
|
||||
from typing import Dict, List
|
||||
import logging
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from amarillo.app.models.AgencyConf import AgencyConf
|
||||
from amarillo.app.services.config import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
agency_conf_directory = 'data/agencyconf'
|
||||
|
||||
|
||||
class AgencyConfService:
|
||||
|
||||
def __init__(self):
|
||||
# Both Dicts to be kept in sync always. The second api_key_to_agency_id is like a reverse
|
||||
# cache for the first for fast lookup of valid api keys, which happens on *every* request.
|
||||
self.agency_id_to_agency_conf: Dict[str, AgencyConf] = {}
|
||||
self.api_key_to_agency_id: Dict[str, str] = {}
|
||||
|
||||
for agency_conf_file_name in glob(f'{agency_conf_directory}/*.json'):
|
||||
with open(agency_conf_file_name) as agency_conf_file:
|
||||
dictionary = json.load(agency_conf_file)
|
||||
|
||||
agency_conf = AgencyConf(**dictionary)
|
||||
|
||||
agency_id = agency_conf.agency_id
|
||||
api_key = agency_conf.api_key
|
||||
|
||||
self.agency_id_to_agency_conf[agency_id] = agency_conf
|
||||
self.api_key_to_agency_id[api_key] = agency_conf.agency_id
|
||||
|
||||
def get_agency_conf(self, agency_id: str) -> AgencyConf:
|
||||
agency_conf = self.agency_id_to_agency_conf.get(agency_id)
|
||||
return agency_conf
|
||||
|
||||
def check_api_key(self, api_key: str) -> str:
|
||||
"""Check if the API key is valid
|
||||
|
||||
The agencies' api keys are checked first, and the admin's key.
|
||||
|
||||
The agency_id or "admin" is returned for further checks in the caller if the
|
||||
request is permitted, like {agency_id} == agency_id.
|
||||
"""
|
||||
|
||||
agency_id = self.api_key_to_agency_id.get(api_key)
|
||||
|
||||
is_agency = agency_id is not None
|
||||
|
||||
if is_agency:
|
||||
return agency_id
|
||||
|
||||
is_admin = api_key == config.admin_token
|
||||
|
||||
if is_admin:
|
||||
return "admin"
|
||||
|
||||
message = "X-API-Key header invalid"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=400, detail=message)
|
||||
|
||||
def add(self, agency_conf: AgencyConf):
|
||||
|
||||
agency_id = agency_conf.agency_id
|
||||
api_key = agency_conf.api_key
|
||||
|
||||
agency_id_exists_already = self.agency_id_to_agency_conf.get(agency_id) is not None
|
||||
|
||||
if agency_id_exists_already:
|
||||
message = f"Agency {agency_id} exists already. To update, delete it first."
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
agency_using_this_api_key_already = self.api_key_to_agency_id.get(api_key)
|
||||
a_different_agency_is_using_this_api_key_already = \
|
||||
agency_using_this_api_key_already is not None and \
|
||||
agency_using_this_api_key_already != agency_id
|
||||
|
||||
if a_different_agency_is_using_this_api_key_already:
|
||||
message = f"Duplicate API Key for {agency_id} not permitted. Use a different key."
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
with open(f'{agency_conf_directory}/{agency_id}.json', 'w', encoding='utf-8') as f:
|
||||
f.write(agency_conf.json())
|
||||
|
||||
self.agency_id_to_agency_conf[agency_id] = agency_conf
|
||||
self.api_key_to_agency_id[api_key] = agency_id
|
||||
|
||||
logger.info(f"Added configuration for agency {agency_id}.")
|
||||
|
||||
def get_agency_ids(self) -> List[str]:
|
||||
return list(self.agency_id_to_agency_conf.keys())
|
||||
|
||||
def delete(self, agency_id):
|
||||
|
||||
agency_conf = self.agency_id_to_agency_conf.get(agency_id)
|
||||
|
||||
api_key = agency_conf.api_key
|
||||
|
||||
del self.api_key_to_agency_id[api_key]
|
||||
|
||||
del self.agency_id_to_agency_conf[agency_id]
|
||||
|
||||
os.remove(f'{agency_conf_directory}/{agency_id}.json')
|
||||
|
||||
logger.info(f"Deleted configuration for agency {agency_id}.")
|
||||
|
|
@ -1,137 +0,0 @@
|
|||
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2
|
||||
import amarillo.app.services.gtfsrt.realtime_extension_pb2 as mfdzrte
|
||||
from amarillo.app.services.gtfs_constants import *
|
||||
from google.protobuf.json_format import MessageToDict
|
||||
from google.protobuf.json_format import ParseDict
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
|
||||
class GtfsRtProducer():
|
||||
|
||||
def __init__(self, trip_store):
|
||||
self.trip_store = trip_store
|
||||
|
||||
def generate_feed(self, time, format='protobuf', bbox=None):
|
||||
# See https://developers.google.com/transit/gtfs-realtime/reference
|
||||
# https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
|
||||
gtfsrt_dict = {
|
||||
'header': {
|
||||
'gtfsRealtimeVersion': '1.0',
|
||||
'timestamp': int(time)
|
||||
},
|
||||
'entity': self._get_trip_updates(bbox)
|
||||
}
|
||||
feed = gtfs_realtime_pb2.FeedMessage()
|
||||
ParseDict(gtfsrt_dict, feed)
|
||||
|
||||
if "message" == format.lower():
|
||||
return feed
|
||||
elif "json" == format.lower():
|
||||
return MessageToDict(feed)
|
||||
else:
|
||||
return feed.SerializeToString()
|
||||
|
||||
def export_feed(self, timestamp, file_path, bbox=None):
|
||||
"""
|
||||
Exports gtfs-rt feed as .json and .pbf file to file_path
|
||||
"""
|
||||
feed = self.generate_feed(timestamp, "message", bbox)
|
||||
with open(f"{file_path}.pbf", "wb") as f:
|
||||
f.write(feed.SerializeToString())
|
||||
with open(f"{file_path}.json", "w") as f:
|
||||
json.dump(MessageToDict(feed), f)
|
||||
|
||||
def _get_trip_updates(self, bbox = None):
|
||||
trips = []
|
||||
trips.extend(self._get_added(bbox))
|
||||
trips.extend(self._get_deleted(bbox))
|
||||
trip_updates = []
|
||||
for num, trip in enumerate(trips):
|
||||
trip_updates.append( {
|
||||
'id': f'carpool-update-{num}',
|
||||
'tripUpdate': trip
|
||||
}
|
||||
)
|
||||
return trip_updates
|
||||
|
||||
def _get_deleted(self, bbox = None):
|
||||
return self._get_updates(
|
||||
self.trip_store.recently_deleted_trips(),
|
||||
self._as_delete_updates,
|
||||
bbox)
|
||||
|
||||
def _get_added(self, bbox = None):
|
||||
return self._get_updates(
|
||||
self.trip_store.recently_added_trips(),
|
||||
self._as_added_updates,
|
||||
bbox)
|
||||
|
||||
def _get_updates(self, trips, update_func, bbox = None):
|
||||
updates = []
|
||||
today = datetime.today()
|
||||
for t in trips:
|
||||
if bbox == None or t.intersects(bbox):
|
||||
updates.extend(update_func(t, today))
|
||||
return updates
|
||||
|
||||
def _as_delete_updates(self, trip, fromdate):
|
||||
return [{
|
||||
'trip': {
|
||||
'tripId': trip.trip_id,
|
||||
'startTime': trip.start_time_str(),
|
||||
'startDate': trip_date,
|
||||
'scheduleRelationship': 'CANCELED',
|
||||
'routeId': trip.trip_id
|
||||
}
|
||||
} for trip_date in trip.next_trip_dates(fromdate)]
|
||||
|
||||
def _to_seconds(self, fromdate, stop_time):
|
||||
startdate = datetime.strptime(fromdate, '%Y%m%d')
|
||||
m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
|
||||
delta = timedelta(
|
||||
hours=int(m.group(1)),
|
||||
minutes=int(m.group(2)),
|
||||
seconds=int(m.group(3)))
|
||||
return time.mktime((startdate + delta).timetuple())
|
||||
|
||||
def _to_stop_times(self, trip, fromdate):
|
||||
return [{
|
||||
'stopSequence': stoptime.stop_sequence,
|
||||
'arrival': {
|
||||
'time': self._to_seconds(fromdate, stoptime.arrival_time),
|
||||
'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
|
||||
},
|
||||
'departure': {
|
||||
'time': self._to_seconds(fromdate, stoptime.departure_time),
|
||||
'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
|
||||
},
|
||||
'stopId': stoptime.stop_id,
|
||||
'scheduleRelationship': 'SCHEDULED',
|
||||
'stop_time_properties': {
|
||||
'[transit_realtime.stop_time_properties]': {
|
||||
'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
|
||||
'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
|
||||
}
|
||||
}
|
||||
}
|
||||
for stoptime in trip.stop_times]
|
||||
|
||||
def _as_added_updates(self, trip, fromdate):
    """Build one ADDED TripUpdate dict (with stop times) per upcoming service date."""
    updates = []
    for service_date in trip.next_trip_dates(fromdate):
        descriptor = {
            'tripId': trip.trip_id,
            'startTime': trip.start_time_str(),
            'startDate': service_date,
            'scheduleRelationship': 'ADDED',
            'routeId': trip.trip_id,
            '[transit_realtime.trip_descriptor]': {
                'routeUrl': trip.url,
                'agencyId': trip.agency,
                'route_long_name': trip.route_long_name(),
                'route_type': RIDESHARING_ROUTE_TYPE
            }
        }
        updates.append({
            'trip': descriptor,
            'stopTimeUpdate': self._to_stop_times(trip, service_date)
        })
    return updates
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
# Constants

# GTFS trips.txt bikes_allowed value: bicycles are not allowed on this trip
NO_BIKES_ALLOWED = 2
# Extended GTFS route_type used for carpooling/ridesharing services
RIDESHARING_ROUTE_TYPE = 1551
# GTFS calendar_dates.txt exception_type values
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
# GTFS stop_times.txt pickup_type / drop_off_type values
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
# GTFS stop_times.txt timepoint values
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1

# Default uncertainty (seconds) attached to GTFS-RT arrival/departure
# predictions for MFDZ carpool trips
MFDZ_DEFAULT_UNCERTAINITY = 600
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1,33 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2


# Serialized FileDescriptor defining MFDZ extensions to GTFS-RT
# (MfdzTripDescriptorExtension, MfdzStopTimePropertiesExtension).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # NOTE(review): `trip_descriptor` / `stop_time_properties` are presumably
  # injected into globals() by BuildTopDescriptorsAndMessages above — confirm
  # against the protoc version that generated this file.
  gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
  gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
  _MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
  _MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
  _MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
import requests
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RoutingException(Exception):
    """Raised when the routing service cannot produce directions."""

    def __init__(self, message):
        # Delegate to Exception so str()/args behave like the parent class
        super().__init__(message)
|
||||
|
||||
class RoutingService():
    """Thin client for a GraphHopper routing endpoint."""

    def __init__(self, gh_url='https://api.mfdz.de/gh'):
        """
        Args:
            gh_url: base URL of the GraphHopper service.
        """
        self.gh_service_url = gh_url

    def path_for_stops(self, points):
        """Retrieve a GraphHopper route traversing the given points.

        Returns:
            The first path of the response, or {} if no path was found.
        Raises:
            RoutingException: if the routing request fails.
        """
        directions = self._get_directions(points)
        if directions and len(directions.get("paths")) > 0:
            return directions.get("paths")[0]
        else:
            return {}

    def _get_directions(self, points):
        """Perform the HTTP request and return the parsed JSON response."""
        req_url = self._create_url(points, True, True)
        logger.debug("Get directions via: {}".format(req_url))
        response = requests.get(req_url)
        status = response.status_code
        if status == 200:
            # Found route between points
            return response.json()
        else:
            try:
                message = response.json().get('message')
            except ValueError:
                # Error body was not JSON — fall back to the status code.
                # (Was a bare `except:`, which also swallowed e.g.
                # KeyboardInterrupt; Response.json raises ValueError.)
                raise RoutingException("Get directions failed with status code {}".format(status))
            else:
                raise RoutingException(message)

    def _create_url(self, points, calc_points=False, instructions=False):
        """Creates GH request URL.

        Args:
            points: objects with x (lon) and y (lat) attributes.
            calc_points: whether GH should return the route geometry.
            instructions: whether GH should return turn instructions.
        """
        locations = ""
        for point in points:
            locations += "point={0}%2C{1}&".format(point.y, point.x)

        return "{0}/route?{1}instructions={2}&calc_points={3}&points_encoded=false".format(
            self.gh_service_url, locations, instructions, calc_points)
|
||||
|
|
@ -1,182 +0,0 @@
|
|||
import csv
|
||||
import geopandas as gpd
|
||||
import pandas as pd
|
||||
from amarillo.app.models.Carpool import StopTime
|
||||
from contextlib import closing
|
||||
from shapely.geometry import Point, LineString
|
||||
from shapely.ops import transform
|
||||
from pyproj import Proj, Transformer
|
||||
import re
|
||||
import requests
|
||||
from io import TextIOWrapper
|
||||
import codecs
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class StopsStore():
    """Registry of stops (bus stops, carpool parkings, ...) loaded from
    CSV/GeoJSON sources, with spatial lookups against carpool trips."""

    def __init__(self, stop_sources=None, internal_projection="EPSG:32632"):
        """
        Args:
            stop_sources: list of {"url": ..., "vicinity": ...} dicts.
            internal_projection: metric CRS used for distance calculations.

        BUG FIX: stop_sources formerly defaulted to a mutable `[]`, which is
        shared between every instance created with the default; replaced by
        the None-sentinel idiom.
        """
        self.internal_projection = internal_projection
        # converts WGS84 lon/lat into the metric internal projection
        self.projection = Transformer.from_crs("EPSG:4326", internal_projection, always_xy=True).transform
        self.stopsDataFrames = []
        self.stop_sources = [] if stop_sources is None else stop_sources

    def load_stop_sources(self):
        """Imports stops from stop_sources and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m.

        Subsequent calls of load_stop_sources will reload all stop_sources
        but replace the current stops only if all stops could be loaded successfully.
        """
        stopsDataFrames = []
        error_occured = False

        for stops_source in self.stop_sources:
            try:
                stopsDataFrame = self._load_stops(stops_source["url"])
                stopsDataFrames.append({'distanceInMeter': stops_source["vicinity"],
                    'stops': stopsDataFrame})
            except Exception:
                error_occured = True
                logger.error("Failed to load stops from %s to StopsStore.", stops_source["url"], exc_info=True)

        if not error_occured:
            # atomically replace the previously loaded stops
            self.stopsDataFrames = stopsDataFrames

    def find_additional_stops_around(self, line, stops=None):
        """Returns a GeoDataFrame with all stops in vicinity of the
        given line, sorted by distance from origin of the line.
        Note: for internal projection/distance calculations, the
        lat/lon geometries of line and stops are converted to the
        internal (metric) projection.
        """
        stops_frames = []
        if stops:
            stops_frames.append(self._convert_to_dataframe(stops))
        transformedLine = transform(self.projection, LineString(line.coordinates))
        for stops_to_match in self.stopsDataFrames:
            stops_frames.append(self._find_stops_around_transformed(stops_to_match['stops'], transformedLine, stops_to_match['distanceInMeter']))
        stops = gpd.GeoDataFrame(pd.concat(stops_frames, ignore_index=True, sort=True))
        if not stops.empty:
            self._sort_by_distance(stops, transformedLine)
        return stops

    def find_closest_stop(self, carpool_stop, max_search_distance):
        """Return the registered stop closest to carpool_stop within
        max_search_distance meters, or carpool_stop itself if none is found."""
        transformedCoord = Point(self.projection(carpool_stop.lon, carpool_stop.lat))
        best_dist = max_search_distance + 1
        best_stop = None
        for stops_with_dist in self.stopsDataFrames:
            stops = stops_with_dist['stops']
            s, d = stops.sindex.nearest(transformedCoord, return_all=True, return_distance=True, max_distance=max_search_distance)
            if len(d) > 0 and d[0] < best_dist:
                best_dist = d[0]
                row = s[1][0]
                best_stop = StopTime(name=stops.at[row, 'stop_name'], lat=stops.at[row, 'y'], lon=stops.at[row, 'x'])

        return best_stop if best_stop else carpool_stop

    def _normalize_stop_name(self, stop_name):
        """Map various Park+Ride spellings to a canonical 'P+R' prefix."""
        default_name = 'P+R-Parkplatz'
        if stop_name in ('', 'Park&Ride'):
            return default_name
        normalized_stop_name = re.sub(r"P(ark)?\s?[\+&]\s?R(ail|ide)?", 'P+R', stop_name)

        return normalized_stop_name

    def _load_stops(self, source: str):
        """Load stops from the given source and return them as a GeoDataFrame.

        Supports http(s) URLs ending in 'json' (GeoJSON), other http(s)
        URLs (semicolon CSV), and local CSV files.
        """
        logger.info("Load stops from %s", source)
        if source.startswith('http'):
            if source.endswith('json'):
                with requests.get(source) as json_source:
                    stopsDataFrame = self._load_stops_geojson(json_source.json())
            else:
                with requests.get(source) as csv_source:
                    stopsDataFrame = self._load_stops_csv(codecs.iterdecode(csv_source.iter_lines(), 'utf-8'))
        else:
            with open(source, encoding='utf-8') as csv_source:
                stopsDataFrame = self._load_stops_csv(csv_source)

        return stopsDataFrame

    def _load_stops_csv(self, csv_source):
        """Parse a semicolon-separated CSV with stop_id/stop_lat/stop_lon/stop_name columns."""
        # note: renamed from `id` etc. to avoid shadowing the builtin
        ids = []
        lats = []
        lons = []
        stop_names = []
        reader = csv.DictReader(csv_source, delimiter=';')
        columns = ['stop_id', 'stop_lat', 'stop_lon', 'stop_name']
        lists = [ids, lats, lons, stop_names]
        for row in reader:
            for col, lst in zip(columns, lists):
                if col == "stop_lat" or col == "stop_lon":
                    # tolerate decimal comma in coordinates
                    lst.append(float(row[col].replace(",", ".")))
                elif col == "stop_name":
                    lst.append(self._normalize_stop_name(row[col]))
                else:
                    lst.append(row[col])

        return self._as_dataframe(ids, lats, lons, stop_names)

    def _load_stops_geojson(self, geojson_source):
        """Parse a GeoJSON FeatureCollection; features without coordinates
        or a name are skipped (logged as errors)."""
        ids = []
        lats = []
        lons = []
        stop_names = []
        columns = ['stop_id', 'stop_lat', 'stop_lon', 'stop_name']
        lists = [ids, lats, lons, stop_names]
        for row in geojson_source['features']:
            coord = row['geometry']['coordinates']
            if not coord or not row['properties'].get('name'):
                logger.error('Stop feature {} has null coord or name'.format(row['id']))
                continue
            for col, lst in zip(columns, lists):
                if col == "stop_lat":
                    lst.append(coord[1])
                elif col == "stop_lon":
                    lst.append(coord[0])
                elif col == "stop_name":
                    lst.append(self._normalize_stop_name(row['properties']['name']))
                elif col == "stop_id":
                    lst.append(row['id'])

        return self._as_dataframe(ids, lats, lons, stop_names)

    def _as_dataframe(self, ids, lats, lons, stop_names):
        """Build a GeoDataFrame in the internal projection from parallel lists."""
        df = gpd.GeoDataFrame(data={'x': lons, 'y': lats, 'stop_name': stop_names, 'id': ids})
        stopsGeoDataFrame = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.x, df.y, crs='EPSG:4326'))
        stopsGeoDataFrame.to_crs(crs=self.internal_projection, inplace=True)
        return stopsGeoDataFrame

    def _find_stops_around_transformed(self, stopsDataFrame, transformedLine, distance):
        """Return all stops within `distance` meters of the (already transformed) line."""
        bufferedLine = transformedLine.buffer(distance)
        sindex = stopsDataFrame.sindex
        # coarse filter via the spatial index, then exact intersection test
        possible_matches_index = list(sindex.intersection(bufferedLine.bounds))
        possible_matches = stopsDataFrame.iloc[possible_matches_index]
        exact_matches = possible_matches[possible_matches.intersects(bufferedLine)]

        return exact_matches

    def _convert_to_dataframe(self, stops):
        """Convert StopTime-like objects into a GeoDataFrame in the internal projection."""
        return gpd.GeoDataFrame([[stop.name, stop.lon, stop.lat,
            stop.id, Point(self.projection(stop.lon, stop.lat))] for stop in stops], columns=['stop_name', 'x', 'y', 'id', 'geometry'], crs=self.internal_projection)

    def _sort_by_distance(self, stops, transformedLine):
        """Sort stops in place by their projected distance along the line."""
        stops['distance'] = stops.apply(lambda row: transformedLine.project(row['geometry']), axis=1)
        stops.sort_values('distance', inplace=True)
|
||||
|
||||
def is_carpooling_stop(stop_id, name):
    """Return True if the stop is explicitly meant for carpooling.

    mfdz: or bbnavi: prefixed stops are custom stops which are explicitly
    meant to be carpooling stops; otherwise the (lowercased) stop name is
    matched against typical carpooling terms.
    """
    if stop_id.startswith(('mfdz:', 'bbnavi:')):
        return True
    lowered = name.lower()
    return 'mitfahr' in lowered or 'p&m' in lowered
|
||||
|
||||
|
|
@ -1,374 +0,0 @@
|
|||
from amarillo.app.models.gtfs import GtfsTimeDelta, GtfsStopTime
|
||||
from amarillo.app.models.Carpool import MAX_STOPS_PER_TRIP, Carpool, Weekday, StopTime, PickupDropoffType
|
||||
from amarillo.app.services.gtfs_constants import *
|
||||
from amarillo.app.services.routing import RoutingService, RoutingException
|
||||
from amarillo.app.services.stops import is_carpooling_stop
|
||||
from amarillo.app.utils.utils import assert_folder_exists, is_older_than_days, yesterday, geodesic_distance_in_m
|
||||
from shapely.geometry import Point, LineString, box
|
||||
from geojson_pydantic.geometries import LineString as GeoJSONLineString
|
||||
from datetime import datetime, timedelta
|
||||
import numpy as np
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Trip:
    """A single carpool trip offer, either one-off or repeating weekly."""

    def __init__(self, trip_id, route_name, headsign, url, calendar, departureTime, path, agency, lastUpdated, stop_times, bbox):
        """
        Args:
            calendar: either a set of Weekday values (repeating trip) or a
                concrete date (one-off trip).
            departureTime: datetime.time of departure.
            bbox: shapely geometry used by intersects().
        """
        if isinstance(calendar, set):
            self.runs_regularly = True
            # one flag per weekday, Monday first (datetime.weekday() order)
            self.weekdays = [
                1 if Weekday.monday in calendar else 0,
                1 if Weekday.tuesday in calendar else 0,
                1 if Weekday.wednesday in calendar else 0,
                1 if Weekday.thursday in calendar else 0,
                1 if Weekday.friday in calendar else 0,
                1 if Weekday.saturday in calendar else 0,
                1 if Weekday.sunday in calendar else 0,
            ]
            # (removed dead local `start_in_day` — it was computed but never used)
        else:
            self.start = datetime.combine(calendar, departureTime)
            self.runs_regularly = False
            self.weekdays = [0, 0, 0, 0, 0, 0, 0]

        self.start_time = departureTime
        self.path = path
        self.trip_id = trip_id
        self.url = url
        self.agency = agency
        self.stops = []
        self.lastUpdated = lastUpdated
        self.stop_times = stop_times
        self.bbox = bbox
        self.route_name = route_name
        self.trip_headsign = headsign

    def path_as_line_string(self):
        # BUG FIX: previously returned the undefined global `path`
        # (NameError at runtime); return the trip's own path instead.
        return self.path

    def _total_seconds(self, instant):
        """Seconds since midnight for a datetime.time."""
        return instant.hour * 3600 + instant.minute * 60 + instant.second

    def start_time_str(self):
        """Departure time formatted as HH:MM:SS."""
        return self.start_time.strftime("%H:%M:%S")

    def next_trip_dates(self, start_date, day_count=14):
        """Yield YYYYMMDD strings of upcoming service dates.

        Repeating trips yield every matching weekday within day_count days
        from start_date; one-off trips yield their single start date.
        """
        if self.runs_regularly:
            for single_date in (start_date + timedelta(n) for n in range(day_count)):
                if self.weekdays[single_date.weekday()] == 1:
                    yield single_date.strftime("%Y%m%d")
        else:
            yield self.start.strftime("%Y%m%d")

    def route_long_name(self):
        return self.route_name

    def intersects(self, bbox):
        """True if this trip's bbox intersects (minx, miny, maxx, maxy)."""
        return self.bbox.intersects(box(*bbox))
|
||||
|
||||
|
||||
class TripStore():
    """
    TripStore maintains the currently valid trips. A trip is a
    carpool offer enhanced with all stops along its route.

    Attributes:
        trips          Dict of currently valid trips.
        deleted_trips  Dict of recently deleted trips.
        recent_trips   Dict of trips added/updated within the last day.
    """

    def __init__(self, stops_store):
        self.transformer = TripTransformer(stops_store)
        self.stops_store = stops_store
        self.trips = {}
        self.deleted_trips = {}
        self.recent_trips = {}

    def put_carpool(self, carpool: Carpool):
        """
        Adds carpool to the TripStore, enhancing it (routing, stop matching)
        unless an up-to-date enhanced version is cached on disk.

        Returns the resulting Trip, or None if enhancement failed.
        """
        filename = f'data/enhanced/{carpool.agency}/{carpool.id}.json'
        try:
            existing_carpool = self._load_carpool_if_exists(carpool.agency, carpool.id)
            if existing_carpool and existing_carpool.lastUpdated == carpool.lastUpdated:
                # unchanged since last enhancement: reuse the cached version
                enhanced_carpool = existing_carpool
            else:
                if len(carpool.stops) < 2 or self.distance_in_m(carpool) < 1000:
                    logger.warning("Failed to add carpool %s:%s to TripStore, distance too low", carpool.agency, carpool.id)
                    self.handle_failed_carpool_enhancement(carpool)
                    return
                enhanced_carpool = self.transformer.enhance_carpool(carpool)
                # TODO should only store enhanced_carpool, if it has 2 or more stops
                assert_folder_exists(f'data/enhanced/{carpool.agency}/')
                with open(filename, 'w', encoding='utf-8') as f:
                    f.write(enhanced_carpool.json())
                logger.info("Added enhanced carpool %s:%s", carpool.agency, carpool.id)

            return self._load_as_trip(enhanced_carpool)
        except RoutingException as err:
            logger.warning("Failed to add carpool %s:%s to TripStore due to RoutingException %s", carpool.agency, carpool.id, getattr(err, 'message', repr(err)))
            self.handle_failed_carpool_enhancement(carpool)
        except Exception:
            logger.error("Failed to add carpool %s:%s to TripStore.", carpool.agency, carpool.id, exc_info=True)
            self.handle_failed_carpool_enhancement(carpool)

    def handle_failed_carpool_enhancement(self, carpool: Carpool):
        """Persist carpools that could not be enhanced, for later inspection.

        (Fixes the former `sellf` parameter-name typo.)
        """
        assert_folder_exists(f'data/failed/{carpool.agency}/')
        with open(f'data/failed/{carpool.agency}/{carpool.id}.json', 'w', encoding='utf-8') as f:
            f.write(carpool.json())

    def distance_in_m(self, carpool):
        """Geodesic distance between the first and last stop (0 if < 2 stops)."""
        if len(carpool.stops) < 2:
            return 0
        s1 = carpool.stops[0]
        s2 = carpool.stops[-1]
        return geodesic_distance_in_m((s1.lon, s1.lat), (s2.lon, s2.lat))

    def recently_added_trips(self):
        return list(self.recent_trips.values())

    def recently_deleted_trips(self):
        return list(self.deleted_trips.values())

    def _load_carpool_if_exists(self, agency_id: str, carpool_id: str):
        """Return the previously enhanced carpool from disk, or None."""
        if carpool_exists(agency_id, carpool_id, 'data/enhanced'):
            try:
                return load_carpool(agency_id, carpool_id, 'data/enhanced')
            except Exception as e:
                # An error on restore could be caused by model changes,
                # in such a case, it needs to be recreated
                logger.warning("Could not restore enhanced trip %s:%s, reason: %s", agency_id, carpool_id, repr(e))

        return None

    def _load_as_trip(self, carpool: Carpool):
        """Convert the (enhanced) carpool to a Trip and register it."""
        trip = self.transformer.transform_to_trip(carpool)
        # renamed from `id`, which shadowed the builtin
        trip_key = trip.trip_id
        self.trips[trip_key] = trip
        if not is_older_than_days(carpool.lastUpdated, 1):
            self.recent_trips[trip_key] = trip
        logger.debug("Added trip %s", trip_key)

        return trip

    def delete_carpool(self, agency_id: str, carpool_id: str):
        """
        Deletes carpool from the TripStore.
        """
        agencyScopedCarpoolId = f"{agency_id}:{carpool_id}"
        trip_to_be_deleted = self.trips.get(agencyScopedCarpoolId)
        if trip_to_be_deleted:
            self.deleted_trips[agencyScopedCarpoolId] = trip_to_be_deleted
            del self.trips[agencyScopedCarpoolId]

        if self.recent_trips.get(agencyScopedCarpoolId):
            del self.recent_trips[agencyScopedCarpoolId]

        if carpool_exists(agency_id, carpool_id):
            remove_carpool_file(agency_id, carpool_id)

        # BUG FIX: formerly logged the builtin `id` function instead of the
        # deleted trip's identifier
        logger.debug("Deleted trip %s", agencyScopedCarpoolId)

    def unflag_unrecent_updates(self):
        """
        Trips that were last updated before yesterday, are not recent
        any longer. As no updates need to be sent for them any longer,
        they will be removed from recent_trips and deleted_trips.
        """
        for key in list(self.recent_trips):
            t = self.recent_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.recent_trips[key]

        for key in list(self.deleted_trips):
            t = self.deleted_trips.get(key)
            if t and t.lastUpdated.date() < yesterday():
                del self.deleted_trips[key]
|
||||
|
||||
|
||||
class TripTransformer:
    """Enhances carpools (routing, stop matching, time estimation) and
    converts them into Trip objects."""

    REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS = True
    # NOTE(review): name keeps the historical "SERACH" typo — renaming this
    # public class attribute could break external users
    REPLACEMENT_STOPS_SERACH_RADIUS_IN_M = 1000
    SIMPLIFY_TOLERANCE = 0.0001

    # shared routing client for all transformer instances
    router = RoutingService()

    def __init__(self, stops_store):
        self.stops_store = stops_store

    def transform_to_trip(self, carpool):
        """Build a Trip (incl. bbox and GTFS stop times) from an enhanced carpool."""
        stop_times = self._convert_stop_times(carpool)
        route_name = carpool.stops[0].name + " nach " + carpool.stops[-1].name
        headsign = carpool.stops[-1].name
        trip_id = self._trip_id(carpool)
        path = carpool.path
        bbox = box(
            min(pt[0] for pt in path.coordinates),
            min(pt[1] for pt in path.coordinates),
            max(pt[0] for pt in path.coordinates),
            max(pt[1] for pt in path.coordinates))

        trip = Trip(trip_id, route_name, headsign, str(carpool.deeplink), carpool.departureDate, carpool.departureTime, carpool.path, carpool.agency, carpool.lastUpdated, stop_times, bbox)

        return trip

    def _trip_id(self, carpool):
        """Agency-scoped trip identifier."""
        return f"{carpool.agency}:{carpool.id}"

    def _replace_stops_by_transit_stops(self, carpool, max_search_distance):
        """Snap every carpool stop to the closest known transit stop in range."""
        new_stops = []
        for carpool_stop in carpool.stops:
            new_stops.append(self.stops_store.find_closest_stop(carpool_stop, max_search_distance))
        return new_stops

    def enhance_carpool(self, carpool):
        """Route the carpool, collect stops along the way and attach
        estimated stop times. Returns a new, enhanced carpool copy."""
        if self.REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS:
            carpool.stops = self._replace_stops_by_transit_stops(carpool, self.REPLACEMENT_STOPS_SERACH_RADIUS_IN_M)

        path = self._path_for_ride(carpool)
        # use the class-level tolerance instead of a duplicated magic number
        lineString_shapely_wgs84 = LineString(coordinates=path["points"]["coordinates"]).simplify(self.SIMPLIFY_TOLERANCE)
        lineString_wgs84 = GeoJSONLineString(type="LineString", coordinates=list(lineString_shapely_wgs84.coords))
        virtual_stops = self.stops_store.find_additional_stops_around(lineString_wgs84, carpool.stops)
        if not virtual_stops.empty:
            virtual_stops["time"] = self._estimate_times(path, virtual_stops['distance'])
            logger.debug("Virtual stops found: {}".format(virtual_stops))
            if len(virtual_stops) > MAX_STOPS_PER_TRIP:
                # in case we found more than MAX_STOPS_PER_TRIP, we retain the
                # first and the last half of MAX_STOPS_PER_TRIP.
                # BUG FIX: np.r_[0:h, h:] raised a TypeError (open-ended slice
                # is not supported by np.r_) and would not have dropped the
                # middle stops anyway.
                half = MAX_STOPS_PER_TRIP // 2
                virtual_stops = virtual_stops.iloc[np.r_[0:half, len(virtual_stops) - half:len(virtual_stops)]]

        trip_id = f"{carpool.agency}:{carpool.id}"
        stop_times = self._stops_and_stop_times(carpool.departureTime, trip_id, virtual_stops)

        enhanced_carpool = carpool.copy()
        enhanced_carpool.stops = stop_times
        enhanced_carpool.path = lineString_wgs84
        return enhanced_carpool

    def _convert_stop_times(self, carpool):
        """Convert the carpool's stops into GtfsStopTime records.

        Pickup/dropoff are COORDINATE_WITH_DRIVER except where the stop is
        explicitly dropoff-only / pickup-only.
        """
        stop_times = [GtfsStopTime(
            self._trip_id(carpool),
            stop.arrivalTime,
            stop.departureTime,
            stop.id,
            seq_nr + 1,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_dropoff else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_pickup else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_TIMEPOINT_APPROXIMATE)
            for seq_nr, stop in enumerate(carpool.stops)]
        return stop_times

    def _path_for_ride(self, carpool):
        """Route through all officially announced stops of the carpool."""
        points = self._stop_coords(carpool.stops)
        return self.router.path_for_stops(points)

    def _stop_coords(self, stops):
        # Retrieve coordinates of all officially announced stops (start, intermediate, target)
        return [Point(stop.lon, stop.lat) for stop in stops]

    def _estimate_times(self, path, distances_from_start):
        """Estimate travel time (ms) from the route start to each distance.

        Walks the GraphHopper instructions, accumulating distance/time, and
        interpolates linearly inside the instruction a stop falls into.
        Raises RoutingException for a zero-length instruction segment.
        """
        cumulated_distance = 0
        cumulated_time = 0
        stop_times = []
        instructions = path["instructions"]

        # (removed two dead locals that pre-read instructions[0])
        cnt = 0
        for distance in distances_from_start:
            while cnt < len(instructions) and cumulated_distance + instructions[cnt]["distance"] < distance:
                cumulated_distance = cumulated_distance + instructions[cnt]["distance"]
                cumulated_time = cumulated_time + instructions[cnt]["time"]
                cnt = cnt + 1

            if cnt < len(instructions):
                if instructions[cnt]["distance"] == 0:
                    # typo fixed in the message ("destinaction")
                    raise RoutingException("Origin and destination too close")
                percent_dist = (distance - cumulated_distance) / instructions[cnt]["distance"]
                stop_time = cumulated_time + percent_dist * instructions[cnt]["time"]
                stop_times.append(stop_time)
            else:
                logger.debug("distance {} exceeds total length {}, using max arrival time {}".format(distance, cumulated_distance, cumulated_time))
                stop_times.append(cumulated_time)
        return stop_times

    def _stops_and_stop_times(self, start_time, trip_id, stops_frame):
        """Convert the matched stops (with estimated times) into StopTime models.

        Assumptions:
        - arrival_time = departure_time
        - pickup_type, drop_off_type for origin: = coordinate/none
        - pickup_type, drop_off_type for destination: = none/coordinate
        - timepoint = approximate for origin and destination (not sure what
          consequences this might have for trip planners)
        """
        number_of_stops = len(stops_frame.index)
        total_distance = stops_frame.iloc[number_of_stops - 1]["distance"]

        first_stop_time = GtfsTimeDelta(hours=start_time.hour, minutes=start_time.minute, seconds=start_time.second)
        stop_times = []
        seq_nr = 0
        for i in range(0, number_of_stops):
            current_stop = stops_frame.iloc[i]

            if not current_stop.id:
                continue
            elif i == 0:
                # ROBUSTNESS: guard against a single-stop frame (formerly an
                # unconditional iloc[1] access, i.e. an IndexError)
                if number_of_stops > 1 and (stops_frame.iloc[1].time - current_stop.time) < 1000:
                    # skip custom stop if there is an official stop very close by
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            else:
                if (current_stop.time - stops_frame.iloc[i - 1].time) < 5000 and not i == 1 and not is_carpooling_stop(current_stop.id, current_stop.stop_name):
                    # skip latter stop if it's very close (<5 seconds drive) by the preceding
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            trip_time = timedelta(milliseconds=int(current_stop.time))
            is_dropoff = self._is_dropoff_stop(current_stop, total_distance)
            is_pickup = self._is_pickup_stop(current_stop, total_distance)
            # TODO would be nice if possible to publish a minimum shared distance
            pickup_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_pickup else STOP_TIMES_STOP_TYPE_NONE
            dropoff_type = STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER if is_dropoff else STOP_TIMES_STOP_TYPE_NONE

            if is_pickup and not is_dropoff:
                pickup_dropoff = PickupDropoffType.only_pickup
            elif not is_pickup and is_dropoff:
                pickup_dropoff = PickupDropoffType.only_dropoff
            else:
                pickup_dropoff = PickupDropoffType.pickup_and_dropoff

            next_stop_time = first_stop_time + trip_time
            seq_nr += 1
            stop_times.append(StopTime(**{
                'arrivalTime': str(next_stop_time),
                'departureTime': str(next_stop_time),
                'id': current_stop.id,
                'pickup_dropoff': pickup_dropoff,
                'name': str(current_stop.stop_name),
                'lat': current_stop.y,
                'lon': current_stop.x
            }))

        return stop_times

    def _is_dropoff_stop(self, current_stop, total_distance):
        """Stops in the second half of the route allow dropoff."""
        return current_stop["distance"] >= 0.5 * total_distance

    def _is_pickup_stop(self, current_stop, total_distance):
        """Stops in the first half of the route allow pickup."""
        return current_stop["distance"] < 0.5 * total_distance
|
||||
|
||||
def load_carpool(agency_id: str, carpool_id: str, folder: str = 'data/enhanced') -> Carpool:
    """Load a serialized carpool from {folder}/{agency_id}/{carpool_id}.json.

    Raises FileNotFoundError / ValidationError on missing or invalid files.
    """
    with open(f'{folder}/{agency_id}/{carpool_id}.json', 'r', encoding='utf-8') as f:
        # renamed from `dict`, which shadowed the builtin
        carpool_dict = json.load(f)
        carpool = Carpool(**carpool_dict)
    return carpool
|
||||
|
||||
def carpool_exists(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Return True when the carpool's JSON file is present under folder."""
    path = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.path.exists(path)
|
||||
|
||||
def remove_carpool_file(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Delete the carpool's JSON file; raises FileNotFoundError if absent."""
    target = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.remove(target)
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
stop_id;stop_code;stop_lat;stop_lon;stop_name
|
||||
mfdz:x;x;52.11901;14.2;Stop x
|
||||
mfdz:y;y;53.1;14.01;Stop y
|
||||
mfdz:z;z;54.11;14.0;Stop z
|
||||
mfdz:Ang001;Ang001;53.11901;14.015776;Mitfahrbank Biesenbrow
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"data": {
|
||||
"pointsOfInterest": [
|
||||
{
|
||||
"id": "14622",
|
||||
"externalId": "bbnavi:12073:0001",
|
||||
"name": "Parkbank",
|
||||
"description": "Parkbank",
|
||||
"dataProvider": {
|
||||
"id": "1",
|
||||
"name": "Administrator"
|
||||
},
|
||||
"addresses": [
|
||||
{
|
||||
"street": "Hauptstrasse",
|
||||
"city": "Wittenberge",
|
||||
"zip": "12345",
|
||||
"geoLocation": {
|
||||
"latitude": 52.9932971109789,
|
||||
"longitude": 11.767383582547
|
||||
}
|
||||
}
|
||||
],
|
||||
"openStreetMap": {
|
||||
"capacity": 112,
|
||||
"capacityCharging": "2",
|
||||
"capacityDisabled": "",
|
||||
"fee": "No",
|
||||
"lit": "Yes",
|
||||
"parking": "",
|
||||
"shelter": "No",
|
||||
"surface": "",
|
||||
"utilization": "",
|
||||
"website": ""
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
from amarillo.app.services import stops
|
||||
from amarillo.app.models.Carpool import StopTime
|
||||
|
||||
def test_load_stops_from_file():
    """Stops from a local CSV file end up in the store."""
    sources = [{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0
|
||||
|
||||
def test_load_csv_stops_from_web_():
    """Stops from a remote CSV source end up in the store (needs network)."""
    sources = [{"url": "https://data.mfdz.de/mfdz/stops/custom.csv", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0
|
||||
|
||||
def test_load_geojson_stops_from_web_():
    """Stops from a remote GeoJSON source end up in the store (needs network)."""
    sources = [{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0
|
||||
|
||||
def test_find_closest_stop():
    """The nearest registered stop within 1 km is returned for a coordinate."""
    sources = [{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    origin = StopTime(name="start", lat=53.1191, lon=14.01577)
    nearest = store.find_closest_stop(origin, 1000)
    assert nearest.name == 'Mitfahrbank Biesenbrow'
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
from amarillo.app.tests.sampledata import cp1, carpool_repeating
|
||||
from amarillo.app.services.trips import TripStore
|
||||
from amarillo.app.services.stops import StopsStore
|
||||
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def test_trip_store_put_one_time_carpool():
|
||||
trip_store = TripStore(StopsStore())
|
||||
|
||||
t = trip_store.put_carpool(cp1)
|
||||
assert t != None
|
||||
assert len(t.stop_times) >= 2
|
||||
assert t.stop_times[0].stop_id == 'mfdz:12073:001'
|
||||
assert t.stop_times[-1].stop_id == 'de:12073:900340137::3'
|
||||
|
||||
def test_trip_store_put_repeating_carpool():
|
||||
trip_store = TripStore(StopsStore())
|
||||
|
||||
t = trip_store.put_carpool(carpool_repeating)
|
||||
assert t != None
|
||||
assert len(t.stop_times) >= 2
|
||||
|
|
@ -1,14 +1,14 @@
|
|||
# separate file so that it can be imported without initializing FastAPI
|
||||
from amarillo.app.utils.container import container
|
||||
from amarillo.utils.container import container
|
||||
import logging
|
||||
|
||||
from amarillo.app.services.agencyconf import AgencyConfService, agency_conf_directory
|
||||
from amarillo.app.services.agencies import AgencyService
|
||||
from amarillo.app.services.regions import RegionService
|
||||
from amarillo.services.users import UserService, user_conf_directory
|
||||
from amarillo.services.agencies import AgencyService
|
||||
from amarillo.services.regions import RegionService
|
||||
|
||||
from amarillo.app.services.config import config
|
||||
from amarillo.services.config import config
|
||||
|
||||
from amarillo.app.utils.utils import assert_folder_exists
|
||||
from amarillo.utils.utils import assert_folder_exists
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -27,12 +27,12 @@ def create_required_directories():
|
|||
assert_folder_exists(f'data/{subdir}/{agency_id}')
|
||||
|
||||
# Agency configurations
|
||||
assert_folder_exists(agency_conf_directory)
|
||||
assert_folder_exists(user_conf_directory)
|
||||
|
||||
|
||||
def configure_services():
|
||||
container['agencyconf'] = AgencyConfService()
|
||||
logger.info("Loaded %d agency configuration(s)", len(container['agencyconf'].agency_id_to_agency_conf))
|
||||
container['users'] = UserService()
|
||||
logger.info("Loaded %d user configuration(s)", len(container['users'].user_id_to_user_conf))
|
||||
|
||||
container['agencies'] = AgencyService()
|
||||
logger.info("Loaded %d agencies", len(container['agencies'].agencies))
|
||||
|
|
@ -6,20 +6,18 @@ import uvicorn
|
|||
import mimetypes
|
||||
from starlette.staticfiles import StaticFiles
|
||||
|
||||
|
||||
from amarillo.app.utils.utils import copy_static_files
|
||||
from amarillo.utils.utils import copy_static_files
|
||||
#this has to run before app.configuration is imported, otherwise we get validation error for config because the config file is not copied yet
|
||||
copy_static_files(["conf", "static", "templates", "logging.conf", "config"])
|
||||
|
||||
import amarillo.plugins
|
||||
from amarillo.app.configuration import configure_services, configure_admin_token
|
||||
from amarillo.app.routers import carpool, agency, agencyconf, region
|
||||
from amarillo.configuration import configure_services, configure_admin_token
|
||||
from amarillo.routers import carpool, agency, region, users
|
||||
import amarillo.services.oauth2 as oauth2
|
||||
from fastapi import FastAPI
|
||||
from amarillo.app.views import home
|
||||
|
||||
|
||||
# https://pydantic-docs.helpmanual.io/usage/settings/
|
||||
# from amarillo.app.views import home
|
||||
from amarillo.views import home
|
||||
|
||||
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
|
||||
logger = logging.getLogger("main")
|
||||
|
|
@ -57,6 +55,10 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
|
|||
},
|
||||
}],
|
||||
servers=[
|
||||
{
|
||||
"description": "MobiData BW Amarillo service",
|
||||
"url": "https://amarillo.mobidata-bw.de"
|
||||
},
|
||||
{
|
||||
"description": "DABB bbnavi Amarillo service",
|
||||
"url": "https://amarillo.bbnavi.de"
|
||||
|
|
@ -69,6 +71,10 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
|
|||
"description": "Dev server for development",
|
||||
"url": "https://amarillo-dev.mfdz.de"
|
||||
},
|
||||
{
|
||||
"description": "Server for Mitanand project",
|
||||
"url": "https://mitanand.mfdz.de"
|
||||
},
|
||||
{
|
||||
"description": "Localhost for development",
|
||||
"url": "http://localhost:8000"
|
||||
|
|
@ -79,8 +85,9 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
|
|||
|
||||
app.include_router(carpool.router)
|
||||
app.include_router(agency.router)
|
||||
app.include_router(agencyconf.router)
|
||||
app.include_router(users.router)
|
||||
app.include_router(region.router)
|
||||
app.include_router(oauth2.router)
|
||||
|
||||
|
||||
def iter_namespace(ns_pkg):
|
||||
|
|
@ -93,14 +100,14 @@ def load_plugins():
|
|||
for finder, name, ispkg
|
||||
in iter_namespace(amarillo.plugins)
|
||||
}
|
||||
print(f"Discovered plugins: {list(discovered_plugins.keys())}")
|
||||
logger.info(f"Discovered plugins: {list(discovered_plugins.keys())}")
|
||||
|
||||
for name, module in discovered_plugins.items():
|
||||
if hasattr(module, "setup"):
|
||||
print(f"Running setup function for {name}")
|
||||
logger.info(f"Running setup function for {name}")
|
||||
module.setup(app)
|
||||
|
||||
else: print(f"Did not find setup function for {name}")
|
||||
else: logger.info(f"Did not find setup function for {name}")
|
||||
|
||||
def configure():
|
||||
configure_admin_token()
|
||||
|
|
@ -121,4 +128,3 @@ if __name__ == "__main__":
|
|||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
else:
|
||||
configure()
|
||||
pass
|
||||
|
|
@ -4,7 +4,7 @@ from typing import List, Union, Set, Optional, Tuple
|
|||
from datetime import time
|
||||
from pydantic import BaseModel, Field
|
||||
from geojson_pydantic.geometries import LineString
|
||||
from enum import Enum
|
||||
from enum import Enum, IntEnum
|
||||
|
||||
NumType = Union[float, int]
|
||||
|
||||
|
|
@ -24,6 +24,15 @@ class PickupDropoffType(str, Enum):
|
|||
only_pickup = "only_pickup"
|
||||
only_dropoff = "only_dropoff"
|
||||
|
||||
class YesNoEnum(IntEnum):
|
||||
yes = 1
|
||||
no = 2
|
||||
|
||||
class LuggageSize(IntEnum):
|
||||
small = 1
|
||||
medium = 2
|
||||
large = 3
|
||||
|
||||
class StopTime(BaseModel):
|
||||
id: Optional[str] = Field(
|
||||
None,
|
||||
|
|
@ -111,6 +120,82 @@ class Region(BaseModel):
|
|||
description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]",
|
||||
examples=[[10.5,49.2,11.3,51.3]])
|
||||
|
||||
class RidesharingInfo(BaseModel):
|
||||
number_free_seats: int = Field(
|
||||
description="Number of free seats",
|
||||
ge=0,
|
||||
examples=[3])
|
||||
|
||||
same_gender: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Trip only for same gender:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[1])
|
||||
luggage_size: Optional[LuggageSize] = Field(
|
||||
None,
|
||||
description="Size of the luggage:"
|
||||
"1: small"
|
||||
"2: medium"
|
||||
"3: large",
|
||||
examples=[3])
|
||||
animal_car: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Animals in Car allowed:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[2])
|
||||
|
||||
car_model: Optional[str] = Field(
|
||||
None,
|
||||
description="Car model",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
examples=["Golf"])
|
||||
car_brand: Optional[str] = Field(
|
||||
None,
|
||||
description="Car brand",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
examples=["VW"])
|
||||
|
||||
creation_date: datetime = Field(
|
||||
description="Date when trip was created",
|
||||
examples=["2022-02-13T20:20:39+00:00"])
|
||||
|
||||
smoking: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Smoking allowed:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[2])
|
||||
|
||||
payment_method: Optional[str] = Field(
|
||||
None,
|
||||
description="Method of payment",
|
||||
min_length=1,
|
||||
max_length=48)
|
||||
|
||||
class Driver(BaseModel):
|
||||
driver_id: Optional[str] = Field(
|
||||
None,
|
||||
description="Identifies the driver.",
|
||||
min_length=1,
|
||||
max_length=256,
|
||||
pattern='^[a-zA-Z0-9_-]+$',
|
||||
examples=["789"])
|
||||
profile_picture: Optional[HttpUrl] = Field(
|
||||
None,
|
||||
description="URL that contains the profile picture",
|
||||
examples=["https://mfdz.de/driver/789/picture"])
|
||||
rating: Optional[int] = Field(
|
||||
None,
|
||||
description="Rating of the driver from 1 to 5."
|
||||
"0 no rating yet",
|
||||
ge=0,
|
||||
le=5,
|
||||
examples=[5])
|
||||
|
||||
class Agency(BaseModel):
|
||||
id: str = Field(
|
||||
description="ID of the agency.",
|
||||
|
|
@ -196,6 +281,17 @@ class Carpool(BaseModel):
|
|||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["mfdz"])
|
||||
|
||||
driver: Optional[Driver] = Field(
|
||||
None,
|
||||
description="Driver data",
|
||||
examples=["""
|
||||
{
|
||||
"driver_id": "123",
|
||||
"profile_picture": "https://mfdz.de/driver/789/picture",
|
||||
"rating": 5
|
||||
}
|
||||
"""])
|
||||
|
||||
deeplink: HttpUrl = Field(
|
||||
description="Link to an information page providing detail information "
|
||||
"for this offer, and, especially, an option to book the "
|
||||
|
|
@ -245,7 +341,22 @@ class Carpool(BaseModel):
|
|||
"published.",
|
||||
examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
|
||||
'"sunday"]'])
|
||||
|
||||
route_color: Optional[str] = Field(
|
||||
None,
|
||||
pattern='^([0-9A-Fa-f]{6})$',
|
||||
description="Route color designation that matches public facing material. "
|
||||
"The color difference between route_color and route_text_color "
|
||||
"should provide sufficient contrast when viewed on a black and "
|
||||
"white screen.",
|
||||
examples=["0039A6"])
|
||||
route_text_color: Optional[str] = Field(
|
||||
None,
|
||||
pattern='^([0-9A-Fa-f]{6})$',
|
||||
description="Legible color to use for text drawn against a background of "
|
||||
"route_color. The color difference between route_color and "
|
||||
"route_text_color should provide sufficient contrast when "
|
||||
"viewed on a black and white screen.",
|
||||
examples=["D4D2D2"])
|
||||
path: Optional[LineString] = Field(
|
||||
None, description="Optional route geometry as json LineString.")
|
||||
|
||||
|
|
@ -257,6 +368,18 @@ class Carpool(BaseModel):
|
|||
"purge outdated offers (e.g. older than 180 days). If not "
|
||||
"passed, the service may assume 'now'",
|
||||
examples=["2022-02-13T20:20:39+00:00"])
|
||||
additional_ridesharing_info: Optional[RidesharingInfo] = Field(
|
||||
None,
|
||||
description="Extension of GRFS to the GTFS standard",
|
||||
examples=["""
|
||||
{
|
||||
"number_free_seats": 2,
|
||||
"creation_date": "2022-02-13T20:20:39+00:00",
|
||||
"same_gender": 2,
|
||||
"smoking": 1,
|
||||
"luggage_size": 3
|
||||
}
|
||||
"""])
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"title": "Carpool",
|
||||
# description ...
|
||||
38
amarillo/models/User.py
Normal file
38
amarillo/models/User.py
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
from typing import Annotated, Optional, List
|
||||
from pydantic import ConfigDict, BaseModel, Field
|
||||
class User(BaseModel):
|
||||
#TODO: add attributes admin, permissions, fullname, email
|
||||
|
||||
user_id: str = Field(
|
||||
description="ID of the agency that uses this token.",
|
||||
min_length=1,
|
||||
max_length=20,
|
||||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["mfdz"])
|
||||
|
||||
api_key: Optional[str] = Field(None,
|
||||
description="The agency's API key for using the API",
|
||||
min_length=20,
|
||||
max_length=256,
|
||||
pattern=r'^[a-zA-Z0-9]+$',
|
||||
examples=["d8yLuY4DqMEUCLcfJASi"])
|
||||
password: Optional[str] = Field(None,
|
||||
description="The agency's password for generating JWT tokens",
|
||||
min_length=8,
|
||||
max_length=256,
|
||||
examples=["$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"])
|
||||
permissions: Optional[List[Annotated[str, Field(pattern=r'^[a-z0-9-]+(:[a-z]+)?$')]]] = Field([],
|
||||
description="The permissions of this user, a list of strings in the format <agency:operation> or <operation>",
|
||||
max_length=256,
|
||||
# pattern=r'^[a-zA-Z0-9]+(:[a-zA-Z]+)?$', #TODO
|
||||
examples=["ride2go:read", "all:read", "admin", "geojson"])
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"title": "Agency Configuration",
|
||||
"description": "Configuration for an agency.",
|
||||
"example":
|
||||
{
|
||||
"agency_id": "mfdz",
|
||||
"api_key": "d8yLuY4DqMEUCLcfJASi",
|
||||
"password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"
|
||||
}
|
||||
})
|
||||
|
|
@ -1,121 +0,0 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Test notebook for discovering and importing plugins"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'amarillo.plugins.metrics': <module 'amarillo.plugins.metrics' from '/home/user/amarillo/amarillo-plugins/amarillo-metrics/amarillo/plugins/metrics/__init__.py'>}"
|
||||
]
|
||||
},
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import importlib\n",
|
||||
"import pkgutil\n",
|
||||
"\n",
|
||||
"import amarillo.plugins # FIXME this namespace does not exist if there are 0 plugins installed\n",
|
||||
"\n",
|
||||
"def iter_namespace(ns_pkg):\n",
|
||||
" # Source: https://packaging.python.org/guides/creating-and-discovering-plugins/\n",
|
||||
" return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + \".\")\n",
|
||||
"\n",
|
||||
"discovered_plugins = {\n",
|
||||
" name: importlib.import_module(name)\n",
|
||||
" for finder, name, ispkg\n",
|
||||
" in iter_namespace(amarillo.plugins)\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"discovered_plugins"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"['__name__',\n",
|
||||
" '__doc__',\n",
|
||||
" '__package__',\n",
|
||||
" '__loader__',\n",
|
||||
" '__spec__',\n",
|
||||
" '__path__',\n",
|
||||
" '__file__',\n",
|
||||
" '__cached__',\n",
|
||||
" '__builtins__',\n",
|
||||
" 'metrics',\n",
|
||||
" 'json',\n",
|
||||
" 'logging',\n",
|
||||
" 'os',\n",
|
||||
" 'random',\n",
|
||||
" 'Callable',\n",
|
||||
" 'APIRouter',\n",
|
||||
" 'HTTPException',\n",
|
||||
" 'Depends',\n",
|
||||
" 'Request',\n",
|
||||
" 'datetime',\n",
|
||||
" 'generate_latest',\n",
|
||||
" 'Gauge',\n",
|
||||
" 'Counter',\n",
|
||||
" 'Info',\n",
|
||||
" 'FastAPI',\n",
|
||||
" 'HTTPBasic',\n",
|
||||
" 'HTTPBasicCredentials',\n",
|
||||
" 'PlainTextResponse',\n",
|
||||
" 'secrets',\n",
|
||||
" 'logger',\n",
|
||||
" 'security',\n",
|
||||
" 'amarillo_trips_number_total',\n",
|
||||
" 'router']"
|
||||
]
|
||||
},
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"metrics = discovered_plugins['amarillo.plugins.metrics']\n",
|
||||
"\n",
|
||||
"metrics.__dir__()"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
|
|
@ -4,13 +4,14 @@ from typing import List
|
|||
|
||||
from fastapi import APIRouter, HTTPException, status, Depends
|
||||
|
||||
from amarillo.app.models.Carpool import Carpool, Agency
|
||||
from amarillo.app.routers.agencyconf import verify_api_key, verify_admin_api_key, verify_permission_for_same_agency_or_admin
|
||||
from amarillo.models.Carpool import Carpool, Agency
|
||||
from amarillo.models.User import User
|
||||
from amarillo.services.oauth2 import get_current_user, verify_permission
|
||||
# TODO should move this to service
|
||||
from amarillo.app.routers.carpool import store_carpool, delete_agency_carpools_older_than
|
||||
from amarillo.app.services.agencies import AgencyService
|
||||
from amarillo.app.services.importing.ride2go import import_ride2go
|
||||
from amarillo.app.utils.container import container
|
||||
from amarillo.routers.carpool import store_carpool, delete_agency_carpools_older_than
|
||||
from amarillo.services.agencies import AgencyService
|
||||
from amarillo.services.importing.ride2go import import_ride2go
|
||||
from amarillo.utils.container import container
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -32,7 +33,7 @@ router = APIRouter(
|
|||
status.HTTP_404_NOT_FOUND: {"description": "Agency not found"},
|
||||
},
|
||||
)
|
||||
async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key)) -> Agency:
|
||||
async def get_agency(agency_id: str, requesting_user: User = Depends(get_current_user)) -> Agency:
|
||||
agencies: AgencyService = container['agencies']
|
||||
agency = agencies.get_agency(agency_id)
|
||||
agency_exists = agency is not None
|
||||
|
|
@ -52,6 +53,7 @@ async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key
|
|||
operation_id="sync",
|
||||
summary="Synchronizes all carpool offers",
|
||||
response_model=List[Carpool],
|
||||
response_model_exclude_none=True,
|
||||
responses={
|
||||
status.HTTP_200_OK: {
|
||||
"description": "Carpool created"},
|
||||
|
|
@ -60,8 +62,8 @@ async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key
|
|||
status.HTTP_500_INTERNAL_SERVER_ERROR: {
|
||||
"description": "Import error"}
|
||||
})
|
||||
async def sync(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)) -> List[Carpool]:
|
||||
await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
|
||||
async def sync(agency_id: str, requesting_user: User = Depends(get_current_user)) -> List[Carpool]:
|
||||
verify_permission(f"{agency_id}:sync")
|
||||
|
||||
if agency_id == "ride2go":
|
||||
import_function = import_ride2go
|
||||
|
|
@ -5,12 +5,13 @@ import os.path
|
|||
import re
|
||||
from glob import glob
|
||||
|
||||
from fastapi import APIRouter, Body, Header, HTTPException, status, Depends
|
||||
from fastapi import APIRouter, Body, HTTPException, status, Depends
|
||||
from datetime import datetime
|
||||
|
||||
from amarillo.app.models.Carpool import Carpool
|
||||
from amarillo.app.routers.agencyconf import verify_api_key, verify_permission_for_same_agency_or_admin
|
||||
from amarillo.app.tests.sampledata import examples
|
||||
from amarillo.models.Carpool import Carpool
|
||||
from amarillo.models.User import User
|
||||
from amarillo.services.oauth2 import get_current_user, verify_permission
|
||||
from amarillo.tests.sampledata import examples
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -25,21 +26,20 @@ router = APIRouter(
|
|||
summary="Add a new or update existing carpool",
|
||||
description="Carpool object to be created or updated",
|
||||
response_model=Carpool,
|
||||
response_model_exclude_none=True,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": "Agency does not exist"},
|
||||
|
||||
})
|
||||
async def post_carpool(carpool: Carpool = Body(..., examples=examples),
|
||||
requesting_agency_id: str = Depends(verify_api_key)) -> Carpool:
|
||||
await verify_permission_for_same_agency_or_admin(carpool.agency, requesting_agency_id)
|
||||
requesting_user: User = Depends(get_current_user)) -> Carpool:
|
||||
verify_permission(f"{carpool.agency}:write", requesting_user)
|
||||
|
||||
logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
|
||||
await assert_agency_exists(carpool.agency)
|
||||
|
||||
await set_lastUpdated_if_unset(carpool)
|
||||
|
||||
await save_carpool(carpool)
|
||||
await store_carpool(carpool)
|
||||
|
||||
return carpool
|
||||
|
||||
|
|
@ -48,12 +48,15 @@ async def post_carpool(carpool: Carpool = Body(..., examples=examples),
|
|||
operation_id="getcarpoolById",
|
||||
summary="Find carpool by ID",
|
||||
response_model=Carpool,
|
||||
response_model_exclude_none=True,
|
||||
description="Find carpool by ID",
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"},
|
||||
},
|
||||
)
|
||||
async def get_carpool(agency_id: str, carpool_id: str, api_key: str = Depends(verify_api_key)) -> Carpool:
|
||||
async def get_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)) -> Carpool:
|
||||
verify_permission(f"{agency_id}:read", requesting_user)
|
||||
|
||||
logger.info(f"Get trip {agency_id}:{carpool_id}.")
|
||||
await assert_agency_exists(agency_id)
|
||||
await assert_carpool_exists(agency_id, carpool_id)
|
||||
|
|
@ -72,8 +75,8 @@ async def get_carpool(agency_id: str, carpool_id: str, api_key: str = Depends(ve
|
|||
"description": "Carpool or agency not found"},
|
||||
},
|
||||
)
|
||||
async def delete_carpool(agency_id: str, carpool_id: str, requesting_agency_id: str = Depends(verify_api_key)):
|
||||
await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
|
||||
async def delete_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)):
|
||||
verify_permission(f"{agency_id}:write", requesting_user)
|
||||
|
||||
logger.info(f"Delete trip {agency_id}:{carpool_id}.")
|
||||
await assert_agency_exists(agency_id)
|
||||
|
|
@ -90,10 +93,30 @@ async def _delete_carpool(agency_id: str, carpool_id: str):
|
|||
logger.info(f"Saved carpool {agency_id}:{carpool_id} in trash.")
|
||||
os.remove(f"data/carpool/{agency_id}/{carpool_id}.json")
|
||||
|
||||
try:
|
||||
from amarillo.plugins.metrics import trips_deleted_counter
|
||||
trips_deleted_counter.inc()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
async def store_carpool(carpool: Carpool) -> Carpool:
|
||||
carpool_exists = os.path.exists(f"data/carpool/{carpool.agency}/{carpool.id}.json")
|
||||
|
||||
await set_lastUpdated_if_unset(carpool)
|
||||
await save_carpool(carpool)
|
||||
|
||||
try:
|
||||
from amarillo.plugins.metrics import trips_created_counter, trips_updated_counter
|
||||
if(carpool_exists):
|
||||
# logger.info("Incrementing trips updated")
|
||||
trips_updated_counter.inc()
|
||||
else:
|
||||
# logger.info("Incrementing trips created")
|
||||
trips_created_counter.inc()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return carpool
|
||||
|
||||
async def set_lastUpdated_if_unset(carpool):
|
||||
|
|
@ -114,7 +137,7 @@ async def save_carpool(carpool, folder: str = 'data/carpool'):
|
|||
|
||||
|
||||
async def assert_agency_exists(agency_id: str):
|
||||
agency_exists = os.path.exists(f"conf/agency/{agency_id}.json")
|
||||
agency_exists = os.path.exists(f"data/agency/{agency_id}.json")
|
||||
if not agency_exists:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
55
amarillo/routers/region.py
Normal file
55
amarillo/routers/region.py
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
import logging
|
||||
import time
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Depends
|
||||
|
||||
from amarillo.models.Carpool import Region
|
||||
from amarillo.services.regions import RegionService
|
||||
from amarillo.utils.container import container
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/region",
|
||||
tags=["region"]
|
||||
)
|
||||
|
||||
@router.get("/",
|
||||
operation_id="getRegions",
|
||||
summary="Return all regions",
|
||||
response_model=List[Region],
|
||||
responses={
|
||||
},
|
||||
)
|
||||
async def get_regions() -> List[Region]:
|
||||
service: RegionService = container['regions']
|
||||
|
||||
return list(service.regions.values())
|
||||
|
||||
@router.get("/{region_id}",
|
||||
operation_id="getRegionById",
|
||||
summary="Find region by ID",
|
||||
response_model=Region,
|
||||
description="Find region by ID",
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
|
||||
},
|
||||
)
|
||||
async def get_region(region_id: str) -> Region:
|
||||
region = _assert_region_exists(region_id)
|
||||
logger.info(f"Get region {region_id}.")
|
||||
|
||||
return region
|
||||
|
||||
def _assert_region_exists(region_id: str) -> Region:
|
||||
regions: RegionService = container['regions']
|
||||
region = regions.get_region(region_id)
|
||||
region_exists = region is not None
|
||||
|
||||
if not region_exists:
|
||||
message = f"Region with id {region_id} does not exist."
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
|
||||
|
||||
return region
|
||||
71
amarillo/routers/users.py
Normal file
71
amarillo/routers/users.py
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
import logging
|
||||
from typing import List
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Header, Depends
|
||||
|
||||
from amarillo.models.User import User
|
||||
from amarillo.services.users import UserService
|
||||
from amarillo.services.oauth2 import get_current_user, verify_permission
|
||||
from amarillo.services.config import config
|
||||
from amarillo.utils.container import container
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/users",
|
||||
tags=["users"]
|
||||
)
|
||||
|
||||
# This endpoint is not shown in PROD installations, only in development
|
||||
# TODO make this an explicit config option
|
||||
include_in_schema = config.env != 'PROD'
|
||||
|
||||
|
||||
@router.get("/",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="getUserIdsWhichHaveAConfiguration",
|
||||
summary="Get user which have a configuration",
|
||||
response_model=List[str],
|
||||
description="Returns the user_ids but not the details.",
|
||||
status_code=status.HTTP_200_OK)
|
||||
async def get_user_ids(requesting_user: User = Depends(get_current_user)) -> [str]:
|
||||
return container['users'].get_user_ids()
|
||||
|
||||
|
||||
@router.post("/",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="postNewUserConf",
|
||||
summary="Post a new User")
|
||||
async def post_user_conf(user_conf: User, requesting_user: User = Depends(get_current_user)):
|
||||
verify_permission("admin", requesting_user)
|
||||
user_service: UserService = container['users']
|
||||
user_service.add(user_conf)
|
||||
|
||||
# TODO 400->403
|
||||
@router.delete("/{user_id}",
|
||||
include_in_schema=include_in_schema,
|
||||
operation_id="deleteUser",
|
||||
status_code=status.HTTP_200_OK,
|
||||
summary="Delete configuration of a user. Returns true if the token for the user existed, "
|
||||
"false if it didn't exist."
|
||||
)
|
||||
async def delete_user(user_id: str, requesting_user: User = Depends(get_current_user)):
|
||||
user_may_delete_own = requesting_user.user_id == user_id
|
||||
admin_may_delete_everything = "admin" in requesting_user.permissions
|
||||
is_permitted = user_may_delete_own or admin_may_delete_everything
|
||||
|
||||
if not is_permitted:
|
||||
message = f"User '{requesting_user.user_id} can not delete the configuration for {user_id}"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
user_service: UserService = container['users']
|
||||
|
||||
agency_exists = user_id in user_service.get_user_ids()
|
||||
|
||||
if not agency_exists:
|
||||
message = f"No config for {user_id}"
|
||||
logger.error(message)
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
|
||||
|
||||
user_service.delete(user_id)
|
||||
|
|
@ -2,7 +2,7 @@ import json
|
|||
from glob import glob
|
||||
from typing import Dict
|
||||
|
||||
from amarillo.app.models.Carpool import Agency
|
||||
from amarillo.models.Carpool import Agency
|
||||
|
||||
# TODO FG HB this service should also listen to pyinotify
|
||||
# because the (updated) agencies are needed in the enhancer
|
||||
|
|
@ -12,8 +12,7 @@ class AgencyService:
|
|||
|
||||
def __init__(self):
|
||||
self.agencies: Dict[str, Agency] = {}
|
||||
|
||||
for agency_file_name in glob('conf/agency/*.json'):
|
||||
for agency_file_name in glob('data/agency/*.json'):
|
||||
with open(agency_file_name) as agency_file:
|
||||
dict = json.load(agency_file)
|
||||
agency = Agency(**dict)
|
||||
|
|
@ -2,9 +2,9 @@ import json
|
|||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict
|
||||
from amarillo.app.models.Carpool import Carpool
|
||||
from amarillo.app.services.trips import TripStore
|
||||
from amarillo.app.utils.utils import yesterday, is_older_than_days
|
||||
from amarillo.models.Carpool import Carpool
|
||||
from amarillo.services.trips import TripStore
|
||||
from amarillo.utils.utils import yesterday, is_older_than_days
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -37,6 +37,11 @@ class CarpoolService():
|
|||
if cp and self.is_outdated(cp):
|
||||
logger.info("Purge outdated offer %s", key)
|
||||
self.delete(cp.agency, cp.id)
|
||||
try:
|
||||
from amarillo.plugins.metrics import trips_deleted_counter
|
||||
trips_deleted_counter.inc()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
def get(self, agency_id: str, carpool_id: str):
|
||||
return self.carpools.get(f"{agency_id}:{carpool_id}")
|
||||
|
|
@ -6,6 +6,7 @@ class Config(BaseSettings):
|
|||
admin_token: str
|
||||
ride2go_query_data: str
|
||||
env: str = 'DEV'
|
||||
|
||||
graphhopper_base_url: str = 'https://api.mfdz.de/gh'
|
||||
stop_sources_file: str = 'data/stop_sources.json'
|
||||
|
||||
config = Config(_env_file='config', _env_file_encoding='utf-8')
|
||||
|
|
@ -2,10 +2,10 @@ import logging
|
|||
from typing import List
|
||||
|
||||
import requests
|
||||
from amarillo.app.models.Carpool import Carpool, StopTime
|
||||
from amarillo.app.services.config import config
|
||||
from amarillo.models.Carpool import Carpool, StopTime
|
||||
from amarillo.services.config import config
|
||||
|
||||
from amarillo.app.services.secrets import secrets
|
||||
from amarillo.services.secrets import secrets
|
||||
import re
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
75
amarillo/services/mocks.py
Normal file
75
amarillo/services/mocks.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
import time
|
||||
|
||||
|
||||
# Mock GTFS-RT TripUpdate payloads (ADDED and CANCELED trips) for tests/demos.

def _stop_time_update(sequence, local_time_tuple, stop_id, dropoff_type, pickup_type):
    """Build one stopTimeUpdate entry; arrival equals departure, 600 s uncertainty."""
    moment = {
        'time': time.mktime(local_time_tuple),
        'uncertainty': 600
    }
    return {
        'stopSequence': sequence,
        'arrival': dict(moment),
        'departure': dict(moment),
        'stopId': stop_id,
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': dropoff_type,
                'pickupType': pickup_type
            }
        }
    }


# A newly ADDED carpool trip with two scheduled stops.
mock_added = {
    'trip': {
        'tripId': 'mifaz:carpool-update-123',
        'startTime': '07:33:00',
        'startDate': '20220509',
        'scheduleRelationship': 'ADDED',
        'routeId': 'mifaz:carpool-update-123',
        '[transit_realtime.trip_descriptor]': {
            'routeUrl': 'http://myurl',
            'agencyId': 'mifaz',
            'route_long_name': 'Angermünde nach Biesenbrow'}
    },
    'stopTimeUpdate': [
        _stop_time_update(1, (2022, 5, 9, 7, 33, 0, 0, 0, 0),
                          'de:12073:900340108', 'NONE', 'COORDINATE_WITH_DRIVER'),
        _stop_time_update(2, (2022, 5, 9, 8, 3, 0, 0, 0, 0),
                          'mfdz:Ang001', 'COORDINATE_WITH_DRIVER', 'NONE'),
    ]
}

# Full FeedEntity wrapping the ADDED trip above.
mock_trip_updated_added = {
    'id': 'mifaz:carpool-update-123',
    'tripUpdate': mock_added
}


# FeedEntity for a CANCELED trip.
mock_trip_updated_deleted = {
    'id': 'carpool-update-124',
    'tripUpdate': {
        'trip': {
            'tripId': '141',
            'startTime': '17:01:08',
            'startDate': '20220509',
            'scheduleRelationship': 'CANCELED',
            'routeId': '141'
        }
    }
}
|
||||
|
||||
|
||||
165
amarillo/services/oauth2.py
Normal file
165
amarillo/services/oauth2.py
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
# OAuth2 authentication based on https://fastapi.tiangolo.com/tutorial/security/oauth2-jwt/#__tabbed_4_2
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Annotated, Optional, Union
|
||||
import logging
|
||||
import logging.config
|
||||
|
||||
from fastapi import Depends, HTTPException, Header, status, APIRouter
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||
from jose import JWTError, jwt
|
||||
from pydantic import BaseModel
|
||||
from amarillo.models.User import User
|
||||
from amarillo.services.passwords import verify_password
|
||||
from amarillo.utils.container import container
|
||||
from amarillo.services.agencies import AgencyService
|
||||
from amarillo.services.users import UserService
|
||||
from amarillo.models.Carpool import Agency
|
||||
|
||||
from amarillo.services.secrets import secrets
|
||||
|
||||
SECRET_KEY = secrets.secret_key
|
||||
ALGORITHM = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES = 7*24*60
|
||||
|
||||
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
|
||||
logger = logging.getLogger("main")
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
class Token(BaseModel):
    """Response body of the /token endpoint: a bearer access token pair."""
    access_token: str
    token_type: str
||||
|
||||
class TokenData(BaseModel):
    """Claims extracted from a decoded JWT; user_id comes from the "sub" claim."""
    user_id: Union[str, None] = None
|
||||
|
||||
# auto_error=False lets requests without a Bearer token fall through to API-key auth.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)

async def verify_optional_api_key(X_API_Key: Optional[str] = Header(None)):
    """Validate the X-API-Key header when present.

    Returns None when the header is absent; otherwise delegates to
    verify_api_key, which raises for invalid keys.
    """
    # `is None` (identity), not `== None` (PEP 8): the header was not supplied.
    if X_API_Key is None:
        return None
    return await verify_api_key(X_API_Key)
|
||||
|
||||
def authenticate_user(user_id: str, password: str):
    """Check *password* against the stored hash for *user_id*.

    Returns the user_id on success, False when the user is unknown or the
    password does not match (FastAPI OAuth2-tutorial contract).
    """
    user_service: UserService = container['users']
    conf = user_service.user_id_to_user_conf.get(user_id, None)
    if not conf or not verify_password(password, conf.password):
        return False
    return user_id
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None):
    """Return a signed JWT carrying *data* plus an "exp" expiry claim.

    Falls back to a 15-minute lifetime when *expires_delta* is not given.
    """
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    claims = dict(data)
    claims["exp"] = datetime.now(timezone.utc) + lifetime
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
||||
|
||||
|
||||
async def get_current_user(token: str = Depends(oauth2_scheme), user_from_api_key: str = Depends(verify_optional_api_key)) -> User:
    """Resolve the requesting user from a Bearer JWT or, failing that, an X-API-Key header.

    Raises HTTP 401 when neither credential is present or when the JWT is
    invalid/missing its "sub" claim.
    """
    if token:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate OAuth2 credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
        try:
            payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
            # "sub" is set to the user_id by login_for_access_token.
            user_id: str = payload.get("sub")
            if user_id is None:
                raise credentials_exception
            token_data = TokenData(user_id=user_id)
        except JWTError:
            raise credentials_exception
        user_id = token_data.user_id
        if user_id is None:
            raise credentials_exception

        user_service : UserService = container['users']
        return user_service.get_user(user_id)
    elif user_from_api_key:
        # user_from_api_key is the user id resolved by verify_optional_api_key.
        logger.info(f"API Key provided: {user_from_api_key}")
        user_service : UserService = container['users']
        return user_service.get_user(user_from_api_key)
    else:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"},
        )
        raise credentials_exception
|
||||
|
||||
def verify_permission(permission: str, user: User):
    """Raise HTTP 401 unless *user* holds *permission*.

    Permission shapes:
      * "admin" in the user's permissions grants everything,
      * a bare operation name ("gtfs") is matched by exact membership,
      * "agency:operation" pairs match a granted pair when both halves agree,
        with "all" acting as a wildcard on the granted side.
    """

    def _denied():
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=f"User '{user.user_id}' does not have the permission '{permission}'",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Admins bypass every further check.
    if "admin" in user.permissions:
        return

    # Bare operation: plain membership test.
    if ":" not in permission:
        if permission not in user.permissions:
            raise _denied()
        return

    # "agency:operation" form: scan the user's colon-shaped grants.
    for granted in user.permissions:
        if ":" not in granted:
            continue
        wanted_agency, wanted_operation = permission.split(":")
        granted_agency, granted_operation = granted.split(":")
        if (granted_agency in (wanted_agency, "all")) and (granted_operation in (wanted_operation, "all")):
            return

    raise _denied()
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_api_key(X_API_Key: str = Header(...)):
    """Validate a mandatory X-API-Key header and return the matching user id.

    UserService.check_api_key raises HTTP 400 for unknown keys.
    """
    user_service: UserService = container['users']

    return user_service.check_api_key(X_API_Key)
|
||||
|
||||
@router.post("/token")
|
||||
async def login_for_access_token(
|
||||
form_data: Annotated[OAuth2PasswordRequestForm, Depends()]
|
||||
) -> Token:
|
||||
agency = authenticate_user(form_data.username, form_data.password)
|
||||
if not agency:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Incorrect username or password",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
access_token = create_access_token(
|
||||
data={"sub": agency}, expires_delta=access_token_expires
|
||||
)
|
||||
return Token(access_token=access_token, token_type="bearer")
|
||||
|
||||
# TODO: eventually remove this
@router.get("/users/me/", response_model=Agency)
async def read_users_me(
    current_agency: Annotated[Agency, Depends(get_current_user)]
):
    """Debug endpoint returning the agency record of the authenticated caller.

    NOTE(review): get_current_user returns a User object, which is then passed
    as agency_id here — presumably the two ids coincide; confirm before relying
    on this endpoint.
    """
    agency_service : AgencyService = container['agencies']
    return agency_service.get_agency(agency_id=current_agency)
|
||||
|
||||
# TODO: eventually remove this
@router.get("/users/me/items/")
async def read_own_items(
    current_agency: Annotated[str, Depends(get_current_user)]
):
    """Debug endpoint: returns a dummy item owned by the authenticated caller."""
    return [{"item_id": "Foo", "owner": current_agency}]
|
||||
10
amarillo/services/passwords.py
Normal file
10
amarillo/services/passwords.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
from passlib.context import CryptContext

# Single bcrypt-backed context shared by the hash and verify helpers below.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def verify_password(plain_password, hashed_password):
    """Return True when *plain_password* matches the bcrypt *hashed_password*."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password):
    """Hash *password* with the configured bcrypt scheme."""
    return pwd_context.hash(password)
|
||||
|
|
@ -2,15 +2,14 @@ import json
|
|||
from glob import glob
|
||||
from typing import Dict
|
||||
|
||||
from amarillo.app.models.Carpool import Region
|
||||
from amarillo.models.Carpool import Region
|
||||
|
||||
|
||||
class RegionService:
|
||||
|
||||
def __init__(self):
|
||||
self.regions: Dict[str, Region] = {}
|
||||
|
||||
for region_file_name in glob('conf/region/*.json'):
|
||||
for region_file_name in glob('data/region/*.json'):
|
||||
with open(region_file_name) as region_file:
|
||||
dict = json.load(region_file)
|
||||
region = Region(**dict)
|
||||
|
|
@ -1,12 +1,10 @@
|
|||
from typing import Dict
|
||||
from pydantic import Field
|
||||
from pydantic import Field, ConfigDict
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
# Example: secrets = { "mfdz": "some secret" }
|
||||
class Secrets(BaseSettings):
|
||||
model_config = ConfigDict(extra='allow')
|
||||
ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN')
|
||||
metrics_user: str = Field(None, env = 'METRICS_USER')
|
||||
metrics_password: str = Field(None, env = 'METRICS_PASSWORD')
|
||||
secret_key: str = Field(None, env = 'SECRET_KEY')
|
||||
|
||||
|
||||
# Read if file exists, otherwise no error (it's in .gitignore)
|
||||
115
amarillo/services/users.py
Normal file
115
amarillo/services/users.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import json
|
||||
import os
|
||||
from glob import glob
|
||||
from typing import Dict, List
|
||||
import logging
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from amarillo.models.User import User
|
||||
from amarillo.services.config import config
|
||||
from amarillo.services.passwords import get_password_hash
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
user_conf_directory = 'data/users'
|
||||
|
||||
|
||||
class UserService:
    """File-backed registry of User configurations stored as data/users/*.json.

    Both dicts are kept in sync always: user_id_to_user_conf maps user_id to
    the User model, and api_key_to_user_id is a reverse cache for fast lookup
    of valid api keys, which happens on *every* request.
    """

    def __init__(self):
        self.user_id_to_user_conf: Dict[str, User] = {}
        self.api_key_to_user_id: Dict[str, str] = {}

        for user_conf_file_name in glob(f'{user_conf_directory}/*.json'):
            with open(user_conf_file_name) as user_conf_file:
                dictionary = json.load(user_conf_file)

                user_conf = User(**dictionary)

                agency_id = user_conf.user_id
                api_key = user_conf.api_key

                self.user_id_to_user_conf[agency_id] = user_conf
                # Users without an api_key never enter the reverse map.
                if api_key is not None:
                    self.api_key_to_user_id[api_key] = user_conf.user_id

    def get_user(self, user_id: str) -> User:
        """Return the User for *user_id*, or None when unknown."""
        return self.user_id_to_user_conf.get(user_id)

    def check_api_key(self, api_key: str) -> str:
        """Check if the API key is valid

        The agencies' api keys are checked first, and the admin's key.

        The agency_id or "admin" is returned for further checks in the caller if the
        request is permitted, like {agency_id} == agency_id.
        """

        agency_id = self.api_key_to_user_id.get(api_key)

        if agency_id is not None:
            return agency_id

        if api_key == config.admin_token:
            return "admin"

        message = "X-API-Key header invalid"
        logger.error(message)
        raise HTTPException(status_code=400, detail=message)

    def add(self, user_conf: User):
        """Persist a new user; rejects duplicate user ids and duplicate api keys."""

        user_id = user_conf.user_id
        api_key = user_conf.api_key

        if self.user_id_to_user_conf.get(user_id) is not None:
            message = f"Agency {user_id} exists already. To update, delete it first."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        current_holder = self.api_key_to_user_id.get(api_key)
        if current_holder is not None and current_holder != user_id:
            message = f"Duplicate API Key for {user_id} not permitted. Use a different key."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        # Store only the bcrypt hash, never the plaintext password.
        user_conf.password = get_password_hash(user_conf.password)

        with open(f'{user_conf_directory}/{user_id}.json', 'w', encoding='utf-8') as f:
            # NOTE(review): pydantic v2 deprecates .json() in favor of
            # model_dump_json(); kept for compatibility with existing callers.
            f.write(user_conf.json())

        self.user_id_to_user_conf[user_id] = user_conf
        # Mirror __init__: a missing api_key must not be mapped (a None key
        # would otherwise make check_api_key resolve None to this user).
        if api_key is not None:
            self.api_key_to_user_id[api_key] = user_id

        logger.info(f"Added configuration for user {user_id}.")

    def get_user_ids(self) -> List[str]:
        """Return the ids of all known users."""
        return list(self.user_id_to_user_conf.keys())

    def delete(self, user_id):
        """Remove *user_id* from both lookup dicts and delete its JSON file.

        Raises HTTP 404 when the user does not exist (previously this crashed
        with an AttributeError on None).
        """

        user_conf = self.user_id_to_user_conf.get(user_id)

        if user_conf is None:
            message = f"User {user_id} does not exist."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)

        api_key = user_conf.api_key

        # Users without an api_key were never added to the reverse map;
        # pop() avoids the KeyError the plain `del` raised for them.
        if api_key is not None:
            self.api_key_to_user_id.pop(api_key, None)

        del self.user_id_to_user_conf[user_id]

        os.remove(f'{user_conf_directory}/{user_id}.json')

        logger.info(f"Deleted configuration for {user_id}.")
|
||||
5
amarillo/static/conf/stop_sources.json
Normal file
5
amarillo/static/conf/stop_sources.json
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
[
|
||||
{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50},
|
||||
{"url": "https://data.mfdz.de/mfdz/stops/stops_zhv.csv", "vicinity": 50},
|
||||
{"url": "https://data.mfdz.de/mfdz/stops/parkings_osm.csv", "vicinity": 500}
|
||||
]
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
# Bounding-Box Germany
|
||||
ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }'
|
||||
env = 'PROD'
|
||||
graphhopper_base_url = 'https://api.mfdz.de/gh'
|
||||
stop_sources_file = 'conf/stop_sources.json'
|
||||
|
|
@ -2,14 +2,14 @@
|
|||
keys=root
|
||||
|
||||
[handlers]
|
||||
keys=consoleHandler
|
||||
keys=consoleHandler, fileHandler
|
||||
|
||||
[formatters]
|
||||
keys=simpleFormatter
|
||||
|
||||
[logger_root]
|
||||
level=INFO
|
||||
handlers=consoleHandler
|
||||
handlers=consoleHandler, fileHandler
|
||||
propagate=yes
|
||||
|
||||
[handler_consoleHandler]
|
||||
|
|
@ -18,5 +18,11 @@ level=DEBUG
|
|||
formatter=simpleFormatter
|
||||
args=(sys.stdout,)
|
||||
|
||||
[handler_fileHandler]
|
||||
class=handlers.RotatingFileHandler
|
||||
level=ERROR
|
||||
formatter=simpleFormatter
|
||||
args=('error.log', 'a', 1000000, 3) # Filename, mode, maxBytes, backupCount
|
||||
|
||||
[formatter_simpleFormatter]
|
||||
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
|
||||
38
amarillo/tests/no_test_carpool.py
Normal file
38
amarillo/tests/no_test_carpool.py
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
from fastapi.testclient import TestClient
|
||||
from amarillo.main import app
|
||||
from amarillo.tests.sampledata import carpool_1234, data1
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
# TODO FG: This test needs a clean temporary storage folder, not the hard coded data dir.
|
||||
def test_doc():
    """Smoke test: the OpenAPI schema endpoint must be reachable."""
    response = client.get("/openapi.json")
    assert response.status_code == 200
|
||||
|
||||
def test_get_mfdz_0():
    """GET for a carpool id that was never created must yield 404 with a detail message."""
    response = client.get("/carpool/mfdz/0")
    assert response.status_code == 404
    assert response.json() == {"detail": "Carpool with agency mfdz and id 0 does not exist."}
|
||||
|
||||
|
||||
def test_delete_mfdz_0():
    """DELETE for a carpool id that was never created must yield 404 with a detail message."""
    response = client.delete("/carpool/mfdz/0")
    assert response.status_code == 404
    assert response.json() == {"detail": "Carpool with id 0 does not exist."}
|
||||
|
||||
|
||||
def test_post():
    """Full lifecycle: POST creates a carpool, GET finds it, first DELETE succeeds, second 404s."""
    response = client.get(f"/carpool/mfdz/{data1['id']}")
    assert response.status_code == 404, "The carpool should not exist yet"

    response = client.post("/carpool/", json=data1)
    assert response.status_code == 200, "The first post must work with 200"

    response = client.get(f"/carpool/mfdz/{data1['id']}")
    assert response.status_code == 200, "After post, the get must work"

    response = client.delete(f"/carpool/mfdz/{data1['id']}")
    assert response.status_code == 200, "The first delete must work with 200"

    response = client.delete(f"/carpool/mfdz/{data1['id']}")
    assert response.status_code == 404, "The second delete must fail"
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
from amarillo.app.models.Carpool import Carpool, StopTime, Weekday
|
||||
from amarillo.models.Carpool import Carpool, StopTime, Weekday
|
||||
|
||||
# TODO use meanigful values for id and lat, lon
|
||||
stops_1234 = [
|
||||
37
amarillo/tests/test_permissions.py
Normal file
37
amarillo/tests/test_permissions.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from fastapi import HTTPException
|
||||
import pytest
|
||||
from amarillo.services.oauth2 import verify_permission
|
||||
from amarillo.models.User import User
|
||||
|
||||
# Fixtures: a regular user with every permission shape, and a full admin.
test_user = User(user_id="test", password="testpassword", permissions=["all:read", "mfdz:write", "ride2go:all", "gtfs"])
admin_user = User(user_id="admin", password="testpassword", permissions=["admin"])
|
||||
|
||||
def test_operation():
    """Bare operation names are matched by exact membership in user.permissions."""
    verify_permission("gtfs", test_user)

    with pytest.raises(HTTPException):
        verify_permission("geojson", test_user)
|
||||
|
||||
def test_agency_permission():
    """agency:operation grants honour the "all" wildcard on the granted side."""
    verify_permission("mvv:read", test_user)
    verify_permission("mfdz:read", test_user)
    verify_permission("mfdz:write", test_user)
    verify_permission("ride2go:write", test_user)

    # One raises-context per call: in a single context the first raising call
    # exits the block, so any statement after it would never execute.
    with pytest.raises(HTTPException):
        verify_permission("mvv:write", test_user)
    with pytest.raises(HTTPException):
        verify_permission("mvv:all", test_user)
|
||||
|
||||
|
||||
def test_admin():
    """The "admin" grant covers everything; regular users never hold it implicitly."""
    verify_permission("admin", admin_user)
    verify_permission("gtfs", admin_user)
    verify_permission("all:all", admin_user)
    verify_permission("mvv:all", admin_user)
    verify_permission("mfdz:read", admin_user)
    verify_permission("mfdz:write", admin_user)
    verify_permission("ride2go:write", admin_user)

    # One raises-context per call: in a single context the first raising call
    # exits the block, so any statement after it would never execute.
    with pytest.raises(HTTPException):
        verify_permission("admin", test_user)
    with pytest.raises(HTTPException):
        verify_permission("all:all", test_user)
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
import logging
|
||||
|
|
@ -50,7 +49,7 @@ def geodesic_distance_in_m(coord1, coord2):
|
|||
|
||||
|
||||
def copy_static_files(files_and_dirs_to_copy):
|
||||
amarillo_dir = Path(__file__).parents[2]
|
||||
amarillo_dir = Path(__file__).parents[1]
|
||||
source_dir = os.path.join(amarillo_dir, "static")
|
||||
|
||||
destination_dir = os.getcwd()
|
||||
|
|
@ -59,6 +58,9 @@ def copy_static_files(files_and_dirs_to_copy):
|
|||
source_path = os.path.join(source_dir, item)
|
||||
destination_path = os.path.join(destination_dir, item)
|
||||
|
||||
if not os.path.exists(source_path):
|
||||
raise FileNotFoundError(source_path)
|
||||
|
||||
if os.path.exists(destination_path):
|
||||
# logger.info(f"{item} already exists")
|
||||
continue
|
||||
|
|
@ -1,17 +1,24 @@
|
|||
[project]
|
||||
name = "amarillo-core"
|
||||
version = "0.0.11"
|
||||
name = "amarillo"
|
||||
version = "0.0.15a4"
|
||||
description = "Aggregates and enhances carpooling-offers and publishes them as GTFS(-RT)"
|
||||
readme = "README.md"
|
||||
license = {file = "LICENSE"}
|
||||
keywords = ["amarillo", "ridesharing", "carpooling", "gtfs", "gtfs-rt"]
|
||||
dependencies = [
|
||||
"fastapi[all]==0.104.0",
|
||||
"fastapi[all]==0.109.0",
|
||||
"geopandas==0.14",
|
||||
"uvicorn[standard]==0.23.2",
|
||||
"pydantic[dotenv]==2.4.2",
|
||||
"protobuf==3.20.3",
|
||||
"starlette",
|
||||
"starlette~=0.35",
|
||||
"requests==2.31.0",
|
||||
"pyproj==3.6.1",
|
||||
"geojson-pydantic==1.0.1",
|
||||
"pytest",
|
||||
"watchdog==3.0.0",
|
||||
"python-jose[cryptography]",
|
||||
"bcrypt==4.0.1",
|
||||
"passlib[bcrypt]"
|
||||
]
|
||||
|
||||
[tool.setuptools.packages]
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
fastapi[all]==0.104.0
|
||||
fastapi[all]==0.109.0
|
||||
geopandas==0.14
|
||||
uvicorn[standard]==0.23.2
|
||||
pydantic[dotenv]==2.4.2
|
||||
protobuf==3.20.3
|
||||
starlette
|
||||
starlette~=0.35
|
||||
requests==2.31.0
|
||||
pyproj==3.6.1
|
||||
geojson-pydantic==1.0.1
|
||||
pytest
|
||||
python-jose[cryptography]
|
||||
bcrypt==4.0.1
|
||||
passlib[bcrypt]
|
||||
Loading…
Reference in a new issue