Compare commits

...

25 commits

Author SHA1 Message Date
Csaba 797eff5b2a Change bearer token expiry to 1 week
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-05-22 14:53:10 +02:00
Csaba b9b47dfc2a Updated Jenkinfile to use grfs-exporter
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-04-22 15:06:02 +02:00
Csaba a04397f59d Use verify_permission in routes
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-04-22 14:49:42 +02:00
Csaba c8acc46382 verify_permission function 2024-04-22 12:49:13 +02:00
Csaba 11d5849290 Get current user function
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-04-18 16:55:35 +02:00
Csaba 66cc746937 Use /data for region and agency configuration
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-04-05 14:28:51 +02:00
Csaba 6af3250bea Renamed AgencyConf to User 2024-04-05 14:13:56 +02:00
Csaba 6f019020ea Added error logging to file in logging.conf
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-03-06 09:50:49 +01:00
Csaba 3ed38a959d Added secret_key variable to Dockerfile
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-03-06 09:20:28 +01:00
Csaba b1aeac37df [#4] load secret key from env variable
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-03-01 16:01:02 +01:00
Csaba 206f93ddde Use OAuth2 to authorize endpoints
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-03-01 15:09:52 +01:00
Csaba acd06b522a [#4] Make password and api_key optional in AgencyConf 2024-03-01 14:40:06 +01:00
Csaba abf5b071a4 Remove metrics plugin secrets 2024-03-01 14:22:29 +01:00
Csaba e81dbbc39c [#4] Fix KeyError in authenticate_agency
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-02-28 11:04:21 +01:00
Csaba bbba8de7ac [#4] Verify password from agencyconf 2024-02-28 10:53:05 +01:00
Csaba c03d5b1232 [#4] Passwords in AgencyConf model 2024-02-28 09:34:13 +01:00
Csaba 319be9f803 Basic OAuth2 authentication 2024-02-26 10:59:09 +01:00
Csaba c521fe5a9a [#3] Added route_color and route_text_color to Carpool model
Some checks failed
Amarillo/amarillo-gitea/amarillo-core/pipeline/head There was a failure building this commit
2024-02-21 12:39:06 +01:00
Csaba 85aaa1e6b1 CRUD metrics
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-02-19 11:13:23 +01:00
Csaba 5aee8b9ecf Only deploy from main branch
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-02-16 14:58:39 +01:00
Csaba 08e0c545a1 Added mitanand.mfdz.de to server list
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-02-15 16:33:29 +01:00
Csaba a033ac86c4 Moved conf directory to amarillo/static
All checks were successful
Amarillo/amarillo-gitea/amarillo-core/pipeline/head This commit looks good
2024-02-14 11:35:48 +01:00
Csaba 1d3150918b Updated .gitignore 2024-02-14 11:34:51 +01:00
Csaba c6aa3e7b8d Added amarillo-grfs-export to Jenkinsfile 2024-02-14 10:56:51 +01:00
Csaba 3de2621b90 rename /app to /amarillo 2024-02-13 10:47:03 +01:00
70 changed files with 853 additions and 1199 deletions

54
.gitignore vendored
View file

@ -1,4 +1,3 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
@ -21,6 +20,7 @@ parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
@ -50,7 +50,6 @@ coverage.xml
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
@ -73,7 +72,6 @@ instance/
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
@ -84,9 +82,7 @@ profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
@ -95,22 +91,7 @@ ipython_config.py
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
@ -147,20 +128,11 @@ dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
setup.ipynb
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.vscode
*~
/.idea/
/.vscode/
secrets
gtfs/*.zip
gtfs/*.pbf
@ -170,12 +142,14 @@ data/enhanced/
data/failed/
data/trash/
data/gtfs/
data/tmp/
data/agencyconf
data/grfs
data/tmp
data/users/**
data/**
#these files are under app/static but they get copied to the outside directory on startup
logging.conf
config
static/*
templates/*
static/**
templates/**
conf/**

View file

@ -17,6 +17,7 @@ RUN \
ENV ADMIN_TOKEN=''
ENV RIDE2GO_TOKEN=''
ENV SECRET_KEY=''
ENV METRICS_USER=''
ENV METRICS_PASSWORD=''
@ -30,13 +31,13 @@ RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN --mount=type=secret,id=AMARILLO_REGISTRY_CREDENTIALS \
pip install --no-cache-dir --upgrade --extra-index-url https://$(cat /run/secrets/AMARILLO_REGISTRY_CREDENTIALS)@${PACKAGE_REGISTRY_URL} ${PLUGINS}
COPY ./amarillo/app /app/amarillo/app
COPY ./amarillo /app/amarillo
COPY ./amarillo/plugins /app/amarillo/plugins
COPY ./amarillo/static/static /app/static
COPY ./amarillo/static/templates /app/templates
COPY ./amarillo/static/config /app
COPY ./amarillo/static/logging.conf /app
COPY ./conf /app/conf
COPY ./amarillo/static/conf /app/conf
# This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead
# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
# CMD ["uvicorn", "amarillo.main:app", "--host", "0.0.0.0", "--port", "8000"]

13
Jenkinsfile vendored
View file

@ -7,9 +7,9 @@ pipeline {
DOCKER_REGISTRY_URL = 'https://git.gerhardt.io'
OWNER = 'amarillo'
IMAGE_NAME = 'amarillo'
AMARILLO_DISTRIBUTION = '0.1'
AMARILLO_DISTRIBUTION = '0.2'
TAG = "${AMARILLO_DISTRIBUTION}.${BUILD_NUMBER}"
PLUGINS = 'amarillo-metrics amarillo-enhancer'
PLUGINS = 'amarillo-metrics amarillo-enhancer amarillo-grfs-exporter'
DEPLOY_WEBHOOK_URL = 'http://amarillo.mfdz.de:8888/mitanand'
DEPLOY_SECRET = credentials('AMARILLO-JENKINS-DEPLOY-SECRET')
}
@ -42,6 +42,9 @@ pipeline {
}
}
stage('Build docker image') {
when {
branch 'main'
}
steps {
echo 'Building image'
script {
@ -52,6 +55,9 @@ pipeline {
}
}
stage('Push image to container registry') {
when {
branch 'main'
}
steps {
echo 'Pushing image to registry'
script {
@ -64,6 +70,9 @@ pipeline {
}
}
stage('Notify CD script') {
when {
branch 'main'
}
steps {
echo 'Triggering deploy webhook'
script {

View file

@ -1,2 +1,2 @@
recursive-include amarillo/static/ *
recursive-include amarillo/app/tests/ *
recursive-include amarillo/tests/ *

View file

@ -9,9 +9,11 @@ An Amarillo is a [yellow-dressed person](https://www.dreamstime.com/sancti-spiri
- Python 3.9.2 with pip
- python3-venv
Create a virtual environment `python3 -m venv venv`. Activate the environment with `source venv/bin/activate` and install the dependencies `pip install -r requirements.txt`.
Create a virtual environment `python3 -m venv venv`.
Run `uvicorn amarillo.app.main:app`.
Activate the environment and install the dependencies `pip install -r requirements.txt`.
Run `uvicorn amarillo.main:app`.
In development, you can use `--reload`.
@ -20,8 +22,6 @@ In development, you can use `--reload`.
- `env`
- `ADMIN_TOKEN`
E.g. set the environment variable like this: `export ADMIN_TOKEN=YOUR_SECRET_TOKEN_HERE`.
## Security
All endpoints are protected by an API-Key in the HTTP header.
@ -29,38 +29,38 @@ There is a special *admin* user.
For this user, the API-Key must be passed in as an environment variable when
Amarillo is started.
The admin can create additional API-Keys in the `/agencyconf` endpoint. This
The admin can create additional API-Keys in the `/users` endpoint. This
endpoint is always available but not always shown in `/docs`, especially not
when running in production.
The Swagger docs for `/agencyconf` can be seen on the MFDZ demo server.
The Swagger docs for `/users` can be seen on the MFDZ demo server.
Permissions work this way
- the admin is allowed to call all operations on all resources. Only the admin
can create new API-Keys by POSTing an `AgencyConf` JSON object to `/agencyconf`.
can create new API-Keys by POSTing an `users` JSON object to `/users`.
- API-Keys for agencies are allowed to POST/PUT/GET/DELETE their own
resources and GET some public resources.
resources and GET some public resources.
## Development
### GTFS-RT python bindings
In case you modify or update the proto-files in app/proto, you'll need to regenerate the python bindings. First, create the python files:
In case you modify or update the proto-files in amarillo/proto, you'll need to regenerate the python bindings. First, create the python files:
```sh
$ cd app/proto
$ cd amarillo/proto
$ protoc --version
libprotoc 3.21.6
$ protoc --proto_path=. --python_out=../services/gtfsrt gtfs-realtime.proto realtime_extension.proto
$ sed 's/import gtfs_realtime_pb2/import app.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
$ sed 's/import gtfs_realtime_pb2/import amarillo.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
```
## Testing
In the top directory, run `pytest app/tests`.
In the top directory, run `pytest amarillo/tests`.
## Docker
Based on [tiangolo/uvicorn-gunicorn:python3.9-slim](https://github.com/tiangolo/uvicorn-gunicorn-docker)
- build `docker build -t amarillo -t latest --build-arg='PACKAGE_REGISTRY_URL=$PACKAGE_REGISTRY_URL' --build-arg="PLUGINS=amarillo-metrics amarillo-enhancer" --secret id=AMARILLO_REGISTRY_CREDENTIALS,src=credentials . `
- run `docker run --rm --name amarillo -p 8000:80 -e MODULE_NAME=amarillo.app.main -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e METRICS_USER=$METRICS_USER -e METRICS_PASSWORD=$METRICS_PASSWORD -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`
- build `docker build -t amarillo --build-arg="PLUGINS=amarillo-metrics" .`
- run `docker run --rm --name amarillo -p 8000:80 -e MAX_WORKERS="1" -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`

1
amarillo/__init__.py Normal file
View file

@ -0,0 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

View file

@ -1,26 +0,0 @@
from pydantic import ConfigDict, BaseModel, Field
class AgencyConf(BaseModel):
agency_id: str = Field(
description="ID of the agency that uses this token.",
min_length=1,
max_length=20,
pattern='^[a-zA-Z0-9]+$',
examples=["mfdz"])
api_key: str = Field(
description="The agency's API key for using the API",
min_length=20,
max_length=256,
pattern=r'^[a-zA-Z0-9]+$',
examples=["d8yLuY4DqMEUCLcfJASi"])
model_config = ConfigDict(json_schema_extra={
"title": "Agency Configuration",
"description": "Configuration for an agency.",
"example":
{
"agency_id": "mfdz",
"api_key": "d8yLuY4DqMEUCLcfJASi"
}
})

View file

@ -1,32 +0,0 @@
from collections import namedtuple
from datetime import timedelta
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_short_name')
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id driver_id service_id shape_id trip_headsign bikes_allowed trip_url')
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
GtfsDriver = namedtuple('GtfsDriver','driver_id profile_picture rating')
GtfsAdditionalRidesharingInfo = namedtuple('GtfsAdditionalRidesharingInfo','trip_id number_free_seats same_gender luggage_size animal_car car_model car_brand creation_date smoking payment_method')
# TODO Move to utils
class GtfsTimeDelta(timedelta):
def __str__(self):
seconds = self.total_seconds()
hours = seconds // 3600
minutes = (seconds % 3600) // 60
seconds = seconds % 60
str = '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
return (str)
def __add__(self, other):
if isinstance(other, timedelta):
return self.__class__(self.days + other.days,
self.seconds + other.seconds,
self.microseconds + other.microseconds)
return NotImplemented

View file

@ -1,103 +0,0 @@
import logging
from typing import List
from fastapi import APIRouter, HTTPException, status, Header, Depends
from amarillo.app.models.AgencyConf import AgencyConf
from amarillo.app.services.agencyconf import AgencyConfService
from amarillo.app.services.config import config
from amarillo.app.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/agencyconf",
tags=["agencyconf"]
)
# This endpoint is not shown in PROD installations, only in development
# TODO make this an explicit config option
include_in_schema = config.env != 'PROD'
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_admin_api_key(X_API_Key: str = Header(...)):
if X_API_Key != config.admin_token:
message="X-API-Key header invalid"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
return "admin"
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_api_key(X_API_Key: str = Header(...)):
agency_conf_service: AgencyConfService = container['agencyconf']
return agency_conf_service.check_api_key(X_API_Key)
# TODO Return code 403 Unauthoized (in response_status_codes as well...)
async def verify_permission_for_same_agency_or_admin(agency_id_in_path_or_body, agency_id_from_api_key):
"""Verifies that an agency is accessing something it owns or the user is admin
The agency_id is part of some paths, or when not in the path it is in the body, e.g. in PUT /carpool.
This function encapsulates the formula 'working with own stuff, or admin'.
"""
is_permitted = agency_id_in_path_or_body == agency_id_from_api_key or agency_id_from_api_key == "admin"
if not is_permitted:
message = f"Working with {agency_id_in_path_or_body} resources is not permitted for {agency_id_from_api_key}."
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
@router.get("/",
include_in_schema=include_in_schema,
operation_id="getAgencyIdsWhichHaveAConfiguration",
summary="Get agency_ids which have a configuration",
response_model=List[str],
description="Returns the agency_ids but not the details.",
status_code=status.HTTP_200_OK)
async def get_agency_ids(admin_api_key: str = Depends(verify_api_key)) -> [str]:
return container['agencyconf'].get_agency_ids()
@router.post("/",
include_in_schema=include_in_schema,
operation_id="postNewAgencyConf",
summary="Post a new AgencyConf")
async def post_agency_conf(agency_conf: AgencyConf, admin_api_key: str = Depends(verify_admin_api_key)):
agency_conf_service: AgencyConfService = container['agencyconf']
agency_conf_service.add(agency_conf)
# TODO 400->403
@router.delete("/{agency_id}",
include_in_schema=include_in_schema,
operation_id="deleteAgencyConf",
status_code=status.HTTP_200_OK,
summary="Delete configuration of an agency. Returns true if the token for the agency existed, "
"false if it didn't exist."
)
async def delete_agency_conf(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)):
agency_may_delete_own = requesting_agency_id == agency_id
admin_may_delete_everything = requesting_agency_id == "admin"
is_permitted = agency_may_delete_own or admin_may_delete_everything
if not is_permitted:
message = f"The API key for {requesting_agency_id} can not delete the configuration for {agency_id}"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
agency_conf_service: AgencyConfService = container['agencyconf']
agency_exists = agency_id in agency_conf_service.get_agency_ids()
if not agency_exists:
message = f"No config for {agency_id}"
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
agency_conf_service.delete(agency_id)

View file

@ -1,88 +0,0 @@
import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.app.models.Carpool import Region
from amarillo.app.routers.agencyconf import verify_admin_api_key, verify_api_key
from amarillo.app.services.regions import RegionService
from amarillo.app.utils.container import container
from fastapi.responses import FileResponse
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/region",
tags=["region"]
)
@router.get("/",
operation_id="getRegions",
summary="Return all regions",
response_model=List[Region],
responses={
},
)
async def get_regions() -> List[Region]:
service: RegionService = container['regions']
return list(service.regions.values())
@router.get("/{region_id}",
operation_id="getRegionById",
summary="Find region by ID",
response_model=Region,
description="Find region by ID",
responses={
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
},
)
async def get_region(region_id: str) -> Region:
region = _assert_region_exists(region_id)
logger.info(f"Get region {region_id}.")
return region
def _assert_region_exists(region_id: str) -> Region:
regions: RegionService = container['regions']
region = regions.get_region(region_id)
region_exists = region is not None
if not region_exists:
message = f"Region with id {region_id} does not exist."
logger.error(message)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
return region
@router.get("/{region_id}/gtfs",
summary="Return GTFS Feed for this region",
response_description="GTFS-Feed (zip-file)",
response_class=FileResponse,
responses={
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
}
)
async def get_file(region_id: str, user: str = Depends(verify_api_key)):
_assert_region_exists(region_id)
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
@router.get("/{region_id}/gtfs-rt",
summary="Return GTFS-RT Feed for this region",
response_description="GTFS-RT-Feed",
response_class=FileResponse,
responses={
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
}
)
async def get_file(region_id: str, format: str = 'protobuf', user: str = Depends(verify_api_key)):
_assert_region_exists(region_id)
if format == 'json':
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
elif format == 'protobuf':
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
else:
message = "Specified format is not supported, i.e. neither protobuf nor json."
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

View file

@ -1,111 +0,0 @@
import json
import os
from glob import glob
from typing import Dict, List
import logging
from fastapi import HTTPException, status
from amarillo.app.models.AgencyConf import AgencyConf
from amarillo.app.services.config import config
logger = logging.getLogger(__name__)
agency_conf_directory = 'data/agencyconf'
class AgencyConfService:
def __init__(self):
# Both Dicts to be kept in sync always. The second api_key_to_agency_id is like a reverse
# cache for the first for fast lookup of valid api keys, which happens on *every* request.
self.agency_id_to_agency_conf: Dict[str, AgencyConf] = {}
self.api_key_to_agency_id: Dict[str, str] = {}
for agency_conf_file_name in glob(f'{agency_conf_directory}/*.json'):
with open(agency_conf_file_name) as agency_conf_file:
dictionary = json.load(agency_conf_file)
agency_conf = AgencyConf(**dictionary)
agency_id = agency_conf.agency_id
api_key = agency_conf.api_key
self.agency_id_to_agency_conf[agency_id] = agency_conf
self.api_key_to_agency_id[api_key] = agency_conf.agency_id
def get_agency_conf(self, agency_id: str) -> AgencyConf:
agency_conf = self.agency_id_to_agency_conf.get(agency_id)
return agency_conf
def check_api_key(self, api_key: str) -> str:
"""Check if the API key is valid
The agencies' api keys are checked first, and the admin's key.
The agency_id or "admin" is returned for further checks in the caller if the
request is permitted, like {agency_id} == agency_id.
"""
agency_id = self.api_key_to_agency_id.get(api_key)
is_agency = agency_id is not None
if is_agency:
return agency_id
is_admin = api_key == config.admin_token
if is_admin:
return "admin"
message = "X-API-Key header invalid"
logger.error(message)
raise HTTPException(status_code=400, detail=message)
def add(self, agency_conf: AgencyConf):
agency_id = agency_conf.agency_id
api_key = agency_conf.api_key
agency_id_exists_already = self.agency_id_to_agency_conf.get(agency_id) is not None
if agency_id_exists_already:
message = f"Agency {agency_id} exists already. To update, delete it first."
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
agency_using_this_api_key_already = self.api_key_to_agency_id.get(api_key)
a_different_agency_is_using_this_api_key_already = \
agency_using_this_api_key_already is not None and \
agency_using_this_api_key_already != agency_id
if a_different_agency_is_using_this_api_key_already:
message = f"Duplicate API Key for {agency_id} not permitted. Use a different key."
logger.error(message)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
with open(f'{agency_conf_directory}/{agency_id}.json', 'w', encoding='utf-8') as f:
f.write(agency_conf.json())
self.agency_id_to_agency_conf[agency_id] = agency_conf
self.api_key_to_agency_id[api_key] = agency_id
logger.info(f"Added configuration for agency {agency_id}.")
def get_agency_ids(self) -> List[str]:
return list(self.agency_id_to_agency_conf.keys())
def delete(self, agency_id):
agency_conf = self.agency_id_to_agency_conf.get(agency_id)
api_key = agency_conf.api_key
del self.api_key_to_agency_id[api_key]
del self.agency_id_to_agency_conf[agency_id]
os.remove(f'{agency_conf_directory}/{agency_id}.json')
logger.info(f"Deleted configuration for agency {agency_id}.")

View file

@ -1,137 +0,0 @@
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2
import amarillo.app.services.gtfsrt.realtime_extension_pb2 as mfdzrte
from amarillo.app.services.gtfs_constants import *
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict
from datetime import datetime, timedelta
import json
import re
import time
class GtfsRtProducer():
def __init__(self, trip_store):
self.trip_store = trip_store
def generate_feed(self, time, format='protobuf', bbox=None):
# See https://developers.google.com/transit/gtfs-realtime/reference
# https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
gtfsrt_dict = {
'header': {
'gtfsRealtimeVersion': '1.0',
'timestamp': int(time)
},
'entity': self._get_trip_updates(bbox)
}
feed = gtfs_realtime_pb2.FeedMessage()
ParseDict(gtfsrt_dict, feed)
if "message" == format.lower():
return feed
elif "json" == format.lower():
return MessageToDict(feed)
else:
return feed.SerializeToString()
def export_feed(self, timestamp, file_path, bbox=None):
"""
Exports gtfs-rt feed as .json and .pbf file to file_path
"""
feed = self.generate_feed(timestamp, "message", bbox)
with open(f"{file_path}.pbf", "wb") as f:
f.write(feed.SerializeToString())
with open(f"{file_path}.json", "w") as f:
json.dump(MessageToDict(feed), f)
def _get_trip_updates(self, bbox = None):
trips = []
trips.extend(self._get_added(bbox))
trips.extend(self._get_deleted(bbox))
trip_updates = []
for num, trip in enumerate(trips):
trip_updates.append( {
'id': f'carpool-update-{num}',
'tripUpdate': trip
}
)
return trip_updates
def _get_deleted(self, bbox = None):
return self._get_updates(
self.trip_store.recently_deleted_trips(),
self._as_delete_updates,
bbox)
def _get_added(self, bbox = None):
return self._get_updates(
self.trip_store.recently_added_trips(),
self._as_added_updates,
bbox)
def _get_updates(self, trips, update_func, bbox = None):
updates = []
today = datetime.today()
for t in trips:
if bbox == None or t.intersects(bbox):
updates.extend(update_func(t, today))
return updates
def _as_delete_updates(self, trip, fromdate):
return [{
'trip': {
'tripId': trip.trip_id,
'startTime': trip.start_time_str(),
'startDate': trip_date,
'scheduleRelationship': 'CANCELED',
'routeId': trip.trip_id
}
} for trip_date in trip.next_trip_dates(fromdate)]
def _to_seconds(self, fromdate, stop_time):
startdate = datetime.strptime(fromdate, '%Y%m%d')
m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
delta = timedelta(
hours=int(m.group(1)),
minutes=int(m.group(2)),
seconds=int(m.group(3)))
return time.mktime((startdate + delta).timetuple())
def _to_stop_times(self, trip, fromdate):
return [{
'stopSequence': stoptime.stop_sequence,
'arrival': {
'time': self._to_seconds(fromdate, stoptime.arrival_time),
'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
},
'departure': {
'time': self._to_seconds(fromdate, stoptime.departure_time),
'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
},
'stopId': stoptime.stop_id,
'scheduleRelationship': 'SCHEDULED',
'stop_time_properties': {
'[transit_realtime.stop_time_properties]': {
'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
}
}
}
for stoptime in trip.stop_times]
def _as_added_updates(self, trip, fromdate):
return [{
'trip': {
'tripId': trip.trip_id,
'startTime': trip.start_time_str(),
'startDate': trip_date,
'scheduleRelationship': 'ADDED',
'routeId': trip.trip_id,
'[transit_realtime.trip_descriptor]': {
'routeUrl' : trip.url,
'agencyId' : trip.agency,
'route_long_name' : trip.route_long_name(),
'route_type': RIDESHARING_ROUTE_TYPE
}
},
'stopTimeUpdate': self._to_stop_times(trip, trip_date)
} for trip_date in trip.next_trip_dates(fromdate)]

View file

@ -1,14 +0,0 @@
# Constants
NO_BIKES_ALLOWED = 2
RIDESHARING_ROUTE_TYPE = 1551
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1
MFDZ_DEFAULT_UNCERTAINITY = 600

File diff suppressed because one or more lines are too long

View file

@ -1,33 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
_MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
_MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)

View file

@ -1,47 +0,0 @@
import requests
import logging
logger = logging.getLogger(__name__)
class RoutingException(Exception):
def __init__(self, message):
# Call Exception.__init__(message)
# to use the same Message header as the parent class
super().__init__(message)
class RoutingService():
    """Thin client for a GraphHopper /route endpoint, used to compute a
    path traversing a sequence of stops."""

    # Seconds before a routing request is aborted instead of blocking forever
    # (the original requests.get had no timeout at all).
    REQUEST_TIMEOUT = 30

    def __init__(self, gh_url='https://api.mfdz.de/gh'):
        """:param gh_url: base URL of the GraphHopper service."""
        self.gh_service_url = gh_url

    def path_for_stops(self, points):
        """Return the first GraphHopper path traversing the given points,
        or an empty dict if no path was found."""
        directions = self._get_directions(points)
        # Default to [] so a 200 response without "paths" yields {} instead of
        # raising TypeError on len(None).
        if directions and len(directions.get("paths", [])) > 0:
            return directions.get("paths")[0]
        return {}

    def _get_directions(self, points):
        """Request directions between points from GraphHopper.

        :raises RoutingException: if the service answers with a non-200 status.
        """
        req_url = self._create_url(points, True, True)
        logger.debug("Get directions via: {}".format(req_url))
        response = requests.get(req_url, timeout=self.REQUEST_TIMEOUT)
        status = response.status_code
        if status == 200:
            # Found route between points
            return response.json()
        try:
            message = response.json().get('message')
        except Exception:
            # Body was not JSON (was a bare `except:` before) - fall back to
            # reporting the HTTP status code only.
            raise RoutingException("Get directions failed with status code {}".format(status))
        raise RoutingException(message)

    def _create_url(self, points, calc_points = False, instructions = False):
        """ Creates GH request URL """
        locations = ""
        for point in points:
            locations += "point={0}%2C{1}&".format(point.y, point.x)
        return "{0}/route?{1}instructions={2}&calc_points={3}&points_encoded=false".format(
            self.gh_service_url, locations, instructions, calc_points)

View file

@ -1,182 +0,0 @@
import csv
import geopandas as gpd
import pandas as pd
from amarillo.app.models.Carpool import StopTime
from contextlib import closing
from shapely.geometry import Point, LineString
from shapely.ops import transform
from pyproj import Proj, Transformer
import re
import requests
from io import TextIOWrapper
import codecs
import logging
logger = logging.getLogger(__name__)
class StopsStore():
    """Registry of stop collections (bus stops, carpool parkings, ...) loaded
    from CSV or GeoJSON sources and held as GeoDataFrames in a metric CRS, so
    vicinity searches and distances can be computed in meters."""

    def __init__(self, stop_sources=None, internal_projection="EPSG:32632"):
        """
        :param stop_sources: list of {"url": ..., "vicinity": ...} dicts, where
            vicinity is the distance in meters a stop is still associated with
            a trip. Defaults to no sources.
        :param internal_projection: metric CRS used for distance calculations.
        """
        self.internal_projection = internal_projection
        # Transform from WGS84 lon/lat into the metric internal projection.
        self.projection = Transformer.from_crs("EPSG:4326", internal_projection, always_xy=True).transform
        self.stopsDataFrames = []
        # None sentinel instead of the original mutable-default-argument `[]`.
        self.stop_sources = [] if stop_sources is None else stop_sources

    def load_stop_sources(self):
        """Imports stops from stop_sources and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m.

        Subsequent calls of load_stop_sources will reload all stop_sources
        but replace the current stops only if all stops could be loaded successfully.
        """
        stops_data_frames = []
        error_occurred = False
        for stops_source in self.stop_sources:
            try:
                frame = self._load_stops(stops_source["url"])
                stops_data_frames.append({'distanceInMeter': stops_source["vicinity"],
                                          'stops': frame})
            except Exception:
                error_occurred = True
                logger.error("Failed to load stops from %s to StopsStore.", stops_source["url"], exc_info=True)
        # Keep the previously loaded stops if any source failed to load.
        if not error_occurred:
            self.stopsDataFrames = stops_data_frames

    def find_additional_stops_around(self, line, stops=None):
        """Returns a GeoDataFrame with all stops in vicinity of the
        given line, sorted by distance from origin of the line.
        Note: for internal projection/distance calculations, the
        lat/lon geometries of line and stops are converted to the
        metric internal projection first.
        """
        stops_frames = []
        if stops:
            stops_frames.append(self._convert_to_dataframe(stops))
        transformed_line = transform(self.projection, LineString(line.coordinates))
        for stops_to_match in self.stopsDataFrames:
            stops_frames.append(self._find_stops_around_transformed(
                stops_to_match['stops'], transformed_line, stops_to_match['distanceInMeter']))
        stops = gpd.GeoDataFrame(pd.concat(stops_frames, ignore_index=True, sort=True))
        if not stops.empty:
            self._sort_by_distance(stops, transformed_line)
        return stops

    def find_closest_stop(self, carpool_stop, max_search_distance):
        """Return the registered stop closest to carpool_stop within
        max_search_distance meters, or carpool_stop itself if none is found."""
        transformed_coord = Point(self.projection(carpool_stop.lon, carpool_stop.lat))
        best_dist = max_search_distance + 1
        best_stop = None
        for stops_with_dist in self.stopsDataFrames:
            stops = stops_with_dist['stops']
            s, d = stops.sindex.nearest(transformed_coord, return_all=True, return_distance=True, max_distance=max_search_distance)
            if len(d) > 0 and d[0] < best_dist:
                best_dist = d[0]
                row = s[1][0]
                best_stop = StopTime(name=stops.at[row, 'stop_name'], lat=stops.at[row, 'y'], lon=stops.at[row, 'x'])
        return best_stop if best_stop else carpool_stop

    def _normalize_stop_name(self, stop_name):
        """Map empty/generic names to a default P+R name and normalize the
        various 'Park and Ride' spellings to 'P+R'."""
        default_name = 'P+R-Parkplatz'
        if stop_name in ('', 'Park&Ride'):
            return default_name
        return re.sub(r"P(ark)?\s?[\+&]\s?R(ail|ide)?", 'P+R', stop_name)

    def _load_stops(self, source: str):
        """Loads stops from given source and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m
        """
        logger.info("Load stops from %s", source)
        if source.startswith('http'):
            if source.endswith('json'):
                with requests.get(source) as json_source:
                    return self._load_stops_geojson(json_source.json())
            with requests.get(source) as csv_source:
                return self._load_stops_csv(codecs.iterdecode(csv_source.iter_lines(), 'utf-8'))
        with open(source, encoding='utf-8') as csv_source:
            return self._load_stops_csv(csv_source)

    def _load_stops_csv(self, csv_source):
        """Parse a semicolon-separated CSV with at least the columns
        stop_id;stop_lat;stop_lon;stop_name into a GeoDataFrame."""
        stop_ids, lats, lons, names = [], [], [], []
        reader = csv.DictReader(csv_source, delimiter=';')
        for row in reader:
            stop_ids.append(row['stop_id'])
            # Coordinates may use a decimal comma in the source files.
            lats.append(float(row['stop_lat'].replace(",", ".")))
            lons.append(float(row['stop_lon'].replace(",", ".")))
            names.append(self._normalize_stop_name(row['stop_name']))
        return self._as_dataframe(stop_ids, lats, lons, names)

    def _load_stops_geojson(self, geojson_source):
        """Parse a GeoJSON FeatureCollection into a GeoDataFrame; features
        without coordinates or a name property are skipped (and logged)."""
        stop_ids, lats, lons, names = [], [], [], []
        for feature in geojson_source['features']:
            coord = feature['geometry']['coordinates']
            if not coord or not feature['properties'].get('name'):
                logger.error('Stop feature {} has null coord or name'.format(feature['id']))
                continue
            stop_ids.append(feature['id'])
            lats.append(coord[1])
            lons.append(coord[0])
            names.append(self._normalize_stop_name(feature['properties']['name']))
        return self._as_dataframe(stop_ids, lats, lons, names)

    def _as_dataframe(self, ids, lat, lon, stop_name):
        """Build a GeoDataFrame in the internal projection from parallel
        columns of ids, latitudes, longitudes and names."""
        df = gpd.GeoDataFrame(data={'x': lon, 'y': lat, 'stop_name': stop_name, 'id': ids})
        frame = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.x, df.y, crs='EPSG:4326'))
        frame.to_crs(crs=self.internal_projection, inplace=True)
        return frame

    def _find_stops_around_transformed(self, stopsDataFrame, transformedLine, distance):
        """Return all stops of stopsDataFrame within `distance` meters of the
        (already projected) transformedLine."""
        bufferedLine = transformedLine.buffer(distance)
        sindex = stopsDataFrame.sindex
        # Cheap bounding-box pre-filter via the spatial index, then exact test.
        possible_matches_index = list(sindex.intersection(bufferedLine.bounds))
        possible_matches = stopsDataFrame.iloc[possible_matches_index]
        return possible_matches[possible_matches.intersects(bufferedLine)]

    def _convert_to_dataframe(self, stops):
        """Convert a list of stop objects (with name/lat/lon/id attributes)
        into a GeoDataFrame in the internal projection."""
        return gpd.GeoDataFrame([[stop.name, stop.lon, stop.lat,
            stop.id, Point(self.projection(stop.lon, stop.lat))] for stop in stops], columns = ['stop_name','x','y','id','geometry'], crs=self.internal_projection)

    def _sort_by_distance(self, stops, transformedLine):
        """Sort stops in place by their projected distance along the line."""
        stops['distance'] = stops.apply(lambda row: transformedLine.project(row['geometry']), axis=1)
        stops.sort_values('distance', inplace=True)
def is_carpooling_stop(stop_id, name):
    """True if this stop is (or may serve as) a carpooling stop.

    mfdz: or bbnavi: prefixed stops are custom stops which are explicitly
    meant to be carpooling stops; otherwise we guess from the stop name.
    """
    lowered_name = name.lower()
    has_custom_prefix = stop_id.startswith(('mfdz:', 'bbnavi:'))
    name_suggests_carpooling = 'mitfahr' in lowered_name or 'p&m' in lowered_name
    return has_custom_prefix or name_suggests_carpooling

View file

@ -1,5 +0,0 @@
stop_id;stop_code;stop_lat;stop_lon;stop_name
mfdz:x;x;52.11901;14.2;Stop x
mfdz:y;y;53.1;14.01;Stop y
mfdz:z;z;54.11;14.0;Stop z
mfdz:Ang001;Ang001;53.11901;14.015776;Mitfahrbank Biesenbrow
1 stop_id stop_code stop_lat stop_lon stop_name
2 mfdz:x x 52.11901 14.2 Stop x
3 mfdz:y y 53.1 14.01 Stop y
4 mfdz:z z 54.11 14.0 Stop z
5 mfdz:Ang001 Ang001 53.11901 14.015776 Mitfahrbank Biesenbrow

View file

@ -1,39 +0,0 @@
{
"data": {
"pointsOfInterest": [
{
"id": "14622",
"externalId": "bbnavi:12073:0001",
"name": "Parkbank",
"description": "Parkbank",
"dataProvider": {
"id": "1",
"name": "Administrator"
},
"addresses": [
{
"street": "Hauptstrasse",
"city": "Wittenberge",
"zip": "12345",
"geoLocation": {
"latitude": 52.9932971109789,
"longitude": 11.767383582547
}
}
],
"openStreetMap": {
"capacity": 112,
"capacityCharging": "2",
"capacityDisabled": "",
"fee": "No",
"lit": "Yes",
"parking": "",
"shelter": "No",
"surface": "",
"utilization": "",
"website": ""
}
}
]
}
}

View file

@ -1,24 +0,0 @@
from amarillo.app.services import stops
from amarillo.app.models.Carpool import StopTime
def test_load_stops_from_file():
    """Loading a local CSV source should yield a non-empty stops frame."""
    csv_store = stops.StopsStore([{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}])
    csv_store.load_stop_sources()
    first_source = csv_store.stopsDataFrames[0]
    assert len(first_source['stops']) > 0
def test_load_csv_stops_from_web_():
    """Loading a CSV source via http(s) should yield a non-empty stops frame."""
    web_store = stops.StopsStore([{"url": "https://data.mfdz.de/mfdz/stops/custom.csv", "vicinity": 50}])
    web_store.load_stop_sources()
    first_source = web_store.stopsDataFrames[0]
    assert len(first_source['stops']) > 0
def test_load_geojson_stops_from_web_():
    """Loading a GeoJSON source via http(s) should yield a non-empty stops frame."""
    geojson_store = stops.StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}])
    geojson_store.load_stop_sources()
    first_source = geojson_store.stopsDataFrames[0]
    assert len(first_source['stops']) > 0
def test_find_closest_stop():
    """The closest registered stop within 1km of the origin is returned."""
    store = stops.StopsStore([{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}])
    store.load_stop_sources()
    origin = StopTime(name="start", lat=53.1191, lon=14.01577)
    closest = store.find_closest_stop(origin, 1000)
    assert closest.name == 'Mitfahrbank Biesenbrow'

View file

@ -1,23 +0,0 @@
from amarillo.app.tests.sampledata import cp1, carpool_repeating
from amarillo.app.services.trips import TripStore
from amarillo.app.services.stops import StopsStore
import logging
logger = logging.getLogger(__name__)
def test_trip_store_put_one_time_carpool():
    """A one-off carpool yields a trip with the expected first/last stops."""
    store = TripStore(StopsStore())
    trip = store.put_carpool(cp1)
    assert trip is not None
    assert len(trip.stop_times) >= 2
    assert trip.stop_times[0].stop_id == 'mfdz:12073:001'
    assert trip.stop_times[-1].stop_id == 'de:12073:900340137::3'
def test_trip_store_put_repeating_carpool():
    """A repeating carpool also yields a trip with at least two stop_times."""
    store = TripStore(StopsStore())
    trip = store.put_carpool(carpool_repeating)
    assert trip is not None
    assert len(trip.stop_times) >= 2

View file

@ -1,14 +1,14 @@
# separate file so that it can be imported without initializing FastAPI
from amarillo.app.utils.container import container
from amarillo.utils.container import container
import logging
from amarillo.app.services.agencyconf import AgencyConfService, agency_conf_directory
from amarillo.app.services.agencies import AgencyService
from amarillo.app.services.regions import RegionService
from amarillo.services.users import UserService, user_conf_directory
from amarillo.services.agencies import AgencyService
from amarillo.services.regions import RegionService
from amarillo.app.services.config import config
from amarillo.services.config import config
from amarillo.app.utils.utils import assert_folder_exists
from amarillo.utils.utils import assert_folder_exists
logger = logging.getLogger(__name__)
@ -27,12 +27,12 @@ def create_required_directories():
assert_folder_exists(f'data/{subdir}/{agency_id}')
# Agency configurations
assert_folder_exists(agency_conf_directory)
assert_folder_exists(user_conf_directory)
def configure_services():
container['agencyconf'] = AgencyConfService()
logger.info("Loaded %d agency configuration(s)", len(container['agencyconf'].agency_id_to_agency_conf))
container['users'] = UserService()
logger.info("Loaded %d user configuration(s)", len(container['users'].user_id_to_user_conf))
container['agencies'] = AgencyService()
logger.info("Loaded %d agencies", len(container['agencies'].agencies))

View file

@ -6,20 +6,18 @@ import uvicorn
import mimetypes
from starlette.staticfiles import StaticFiles
from amarillo.app.utils.utils import copy_static_files
from amarillo.utils.utils import copy_static_files
#this has to run before app.configuration is imported, otherwise we get validation error for config because the config file is not copied yet
copy_static_files(["conf", "static", "templates", "logging.conf", "config"])
import amarillo.plugins
from amarillo.app.configuration import configure_services, configure_admin_token
from amarillo.app.routers import carpool, agency, agencyconf, region
from amarillo.configuration import configure_services, configure_admin_token
from amarillo.routers import carpool, agency, region, users
import amarillo.services.oauth2 as oauth2
from fastapi import FastAPI
from amarillo.app.views import home
# https://pydantic-docs.helpmanual.io/usage/settings/
# from amarillo.app.views import home
from amarillo.views import home
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main")
@ -57,6 +55,10 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
},
}],
servers=[
{
"description": "MobiData BW Amarillo service",
"url": "https://amarillo.mobidata-bw.de"
},
{
"description": "DABB bbnavi Amarillo service",
"url": "https://amarillo.bbnavi.de"
@ -83,8 +85,9 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
app.include_router(carpool.router)
app.include_router(agency.router)
app.include_router(agencyconf.router)
app.include_router(users.router)
app.include_router(region.router)
app.include_router(oauth2.router)
def iter_namespace(ns_pkg):
@ -97,14 +100,14 @@ def load_plugins():
for finder, name, ispkg
in iter_namespace(amarillo.plugins)
}
print(f"Discovered plugins: {list(discovered_plugins.keys())}")
logger.info(f"Discovered plugins: {list(discovered_plugins.keys())}")
for name, module in discovered_plugins.items():
if hasattr(module, "setup"):
print(f"Running setup function for {name}")
logger.info(f"Running setup function for {name}")
module.setup(app)
else: print(f"Did not find setup function for {name}")
else: logger.info(f"Did not find setup function for {name}")
def configure():
configure_admin_token()
@ -125,4 +128,3 @@ if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)
else:
configure()
pass

View file

@ -341,7 +341,22 @@ class Carpool(BaseModel):
"published.",
examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
'"sunday"]'])
route_color: Optional[str] = Field(
None,
pattern='^([0-9A-Fa-f]{6})$',
description="Route color designation that matches public facing material. "
"The color difference between route_color and route_text_color "
"should provide sufficient contrast when viewed on a black and "
"white screen.",
examples=["0039A6"])
route_text_color: Optional[str] = Field(
None,
pattern='^([0-9A-Fa-f]{6})$',
description="Legible color to use for text drawn against a background of "
"route_color. The color difference between route_color and "
"route_text_color should provide sufficient contrast when "
"viewed on a black and white screen.",
examples=["D4D2D2"])
path: Optional[LineString] = Field(
None, description="Optional route geometry as json LineString.")

38
amarillo/models/User.py Normal file
View file

@ -0,0 +1,38 @@
from typing import Annotated, Optional, List
from pydantic import ConfigDict, BaseModel, Field
class User(BaseModel):
    """An API user (typically an agency) with its credentials and permissions.

    This model was renamed from AgencyConf to User; the schema example below
    was updated accordingly (it previously still used an agency_id key).
    """
    #TODO: add attributes admin, permissions, fullname, email

    # Unique id of the user/agency this configuration belongs to.
    user_id: str = Field(
        description="ID of the agency that uses this token.",
        min_length=1,
        max_length=20,
        pattern='^[a-zA-Z0-9]+$',
        examples=["mfdz"])

    api_key: Optional[str] = Field(None,
        description="The agency's API key for using the API",
        min_length=20,
        max_length=256,
        pattern=r'^[a-zA-Z0-9]+$',
        examples=["d8yLuY4DqMEUCLcfJASi"])

    # bcrypt hash, not the plain password.
    password: Optional[str] = Field(None,
        description="The agency's password for generating JWT tokens",
        min_length=8,
        max_length=256,
        examples=["$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"])

    permissions: Optional[List[Annotated[str, Field(pattern=r'^[a-z0-9-]+(:[a-z]+)?$')]]] = Field([],
        description="The permissions of this user, a list of strings in the format <agency:operation> or <operation>",
        max_length=256,
        # pattern=r'^[a-zA-Z0-9]+(:[a-zA-Z]+)?$', #TODO
        examples=["ride2go:read", "all:read", "admin", "geojson"])

    model_config = ConfigDict(json_schema_extra={
        # Fixed leftover of the AgencyConf -> User rename: title, description
        # and the example key now match the user_id field.
        "title": "User Configuration",
        "description": "Configuration for a user.",
        "example":
            {
                "user_id": "mfdz",
                "api_key": "d8yLuY4DqMEUCLcfJASi",
                "password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW"
            }
    })

View file

@ -1,121 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Test notebook for discovering and importing plugins"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'amarillo.plugins.metrics': <module 'amarillo.plugins.metrics' from '/home/user/amarillo/amarillo-plugins/amarillo-metrics/amarillo/plugins/metrics/__init__.py'>}"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import importlib\n",
"import pkgutil\n",
"\n",
"import amarillo.plugins # FIXME this namespace does not exist if there are 0 plugins installed\n",
"\n",
"def iter_namespace(ns_pkg):\n",
" # Source: https://packaging.python.org/guides/creating-and-discovering-plugins/\n",
" return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + \".\")\n",
"\n",
"discovered_plugins = {\n",
" name: importlib.import_module(name)\n",
" for finder, name, ispkg\n",
" in iter_namespace(amarillo.plugins)\n",
"}\n",
"\n",
"discovered_plugins"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['__name__',\n",
" '__doc__',\n",
" '__package__',\n",
" '__loader__',\n",
" '__spec__',\n",
" '__path__',\n",
" '__file__',\n",
" '__cached__',\n",
" '__builtins__',\n",
" 'metrics',\n",
" 'json',\n",
" 'logging',\n",
" 'os',\n",
" 'random',\n",
" 'Callable',\n",
" 'APIRouter',\n",
" 'HTTPException',\n",
" 'Depends',\n",
" 'Request',\n",
" 'datetime',\n",
" 'generate_latest',\n",
" 'Gauge',\n",
" 'Counter',\n",
" 'Info',\n",
" 'FastAPI',\n",
" 'HTTPBasic',\n",
" 'HTTPBasicCredentials',\n",
" 'PlainTextResponse',\n",
" 'secrets',\n",
" 'logger',\n",
" 'security',\n",
" 'amarillo_trips_number_total',\n",
" 'router']"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"metrics = discovered_plugins['amarillo.plugins.metrics']\n",
"\n",
"metrics.__dir__()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -4,13 +4,14 @@ from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.app.models.Carpool import Carpool, Agency
from amarillo.app.routers.agencyconf import verify_api_key, verify_admin_api_key, verify_permission_for_same_agency_or_admin
from amarillo.models.Carpool import Carpool, Agency
from amarillo.models.User import User
from amarillo.services.oauth2 import get_current_user, verify_permission
# TODO should move this to service
from amarillo.app.routers.carpool import store_carpool, delete_agency_carpools_older_than
from amarillo.app.services.agencies import AgencyService
from amarillo.app.services.importing.ride2go import import_ride2go
from amarillo.app.utils.container import container
from amarillo.routers.carpool import store_carpool, delete_agency_carpools_older_than
from amarillo.services.agencies import AgencyService
from amarillo.services.importing.ride2go import import_ride2go
from amarillo.utils.container import container
from fastapi.responses import FileResponse
logger = logging.getLogger(__name__)
@ -32,7 +33,7 @@ router = APIRouter(
status.HTTP_404_NOT_FOUND: {"description": "Agency not found"},
},
)
async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key)) -> Agency:
async def get_agency(agency_id: str, requesting_user: User = Depends(get_current_user)) -> Agency:
agencies: AgencyService = container['agencies']
agency = agencies.get_agency(agency_id)
agency_exists = agency is not None
@ -52,6 +53,7 @@ async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key
operation_id="sync",
summary="Synchronizes all carpool offers",
response_model=List[Carpool],
response_model_exclude_none=True,
responses={
status.HTTP_200_OK: {
"description": "Carpool created"},
@ -60,8 +62,8 @@ async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key
status.HTTP_500_INTERNAL_SERVER_ERROR: {
"description": "Import error"}
})
async def sync(agency_id: str, requesting_agency_id: str = Depends(verify_api_key)) -> List[Carpool]:
await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
async def sync(agency_id: str, requesting_user: User = Depends(get_current_user)) -> List[Carpool]:
verify_permission(f"{agency_id}:sync")
if agency_id == "ride2go":
import_function = import_ride2go

View file

@ -5,12 +5,13 @@ import os.path
import re
from glob import glob
from fastapi import APIRouter, Body, Header, HTTPException, status, Depends
from fastapi import APIRouter, Body, HTTPException, status, Depends
from datetime import datetime
from amarillo.app.models.Carpool import Carpool
from amarillo.app.routers.agencyconf import verify_api_key, verify_permission_for_same_agency_or_admin
from amarillo.app.tests.sampledata import examples
from amarillo.models.Carpool import Carpool
from amarillo.models.User import User
from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.tests.sampledata import examples
logger = logging.getLogger(__name__)
@ -25,21 +26,20 @@ router = APIRouter(
summary="Add a new or update existing carpool",
description="Carpool object to be created or updated",
response_model=Carpool,
response_model_exclude_none=True,
responses={
status.HTTP_404_NOT_FOUND: {
"description": "Agency does not exist"},
})
async def post_carpool(carpool: Carpool = Body(..., openapi_examples=examples),
requesting_agency_id: str = Depends(verify_api_key)) -> Carpool:
await verify_permission_for_same_agency_or_admin(carpool.agency, requesting_agency_id)
async def post_carpool(carpool: Carpool = Body(..., examples=examples),
requesting_user: User = Depends(get_current_user)) -> Carpool:
verify_permission(f"{carpool.agency}:write", requesting_user)
logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
await assert_agency_exists(carpool.agency)
await set_lastUpdated_if_unset(carpool)
await save_carpool(carpool)
await store_carpool(carpool)
return carpool
@ -48,12 +48,15 @@ async def post_carpool(carpool: Carpool = Body(..., openapi_examples=examples),
operation_id="getcarpoolById",
summary="Find carpool by ID",
response_model=Carpool,
response_model_exclude_none=True,
description="Find carpool by ID",
responses={
status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"},
},
)
async def get_carpool(agency_id: str, carpool_id: str, api_key: str = Depends(verify_api_key)) -> Carpool:
async def get_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)) -> Carpool:
verify_permission(f"{agency_id}:read", requesting_user)
logger.info(f"Get trip {agency_id}:{carpool_id}.")
await assert_agency_exists(agency_id)
await assert_carpool_exists(agency_id, carpool_id)
@ -72,8 +75,8 @@ async def get_carpool(agency_id: str, carpool_id: str, api_key: str = Depends(ve
"description": "Carpool or agency not found"},
},
)
async def delete_carpool(agency_id: str, carpool_id: str, requesting_agency_id: str = Depends(verify_api_key)):
await verify_permission_for_same_agency_or_admin(agency_id, requesting_agency_id)
async def delete_carpool(agency_id: str, carpool_id: str, requesting_user: User = Depends(get_current_user)):
verify_permission(f"{agency_id}:write", requesting_user)
logger.info(f"Delete trip {agency_id}:{carpool_id}.")
await assert_agency_exists(agency_id)
@ -90,10 +93,30 @@ async def _delete_carpool(agency_id: str, carpool_id: str):
logger.info(f"Saved carpool {agency_id}:{carpool_id} in trash.")
os.remove(f"data/carpool/{agency_id}/{carpool_id}.json")
try:
from amarillo.plugins.metrics import trips_deleted_counter
trips_deleted_counter.inc()
except ImportError:
pass
async def store_carpool(carpool: Carpool) -> Carpool:
carpool_exists = os.path.exists(f"data/carpool/{carpool.agency}/{carpool.id}.json")
await set_lastUpdated_if_unset(carpool)
await save_carpool(carpool)
try:
from amarillo.plugins.metrics import trips_created_counter, trips_updated_counter
if(carpool_exists):
# logger.info("Incrementing trips updated")
trips_updated_counter.inc()
else:
# logger.info("Incrementing trips created")
trips_created_counter.inc()
except ImportError:
pass
return carpool
async def set_lastUpdated_if_unset(carpool):
@ -114,7 +137,7 @@ async def save_carpool(carpool, folder: str = 'data/carpool'):
async def assert_agency_exists(agency_id: str):
agency_exists = os.path.exists(f"conf/agency/{agency_id}.json")
agency_exists = os.path.exists(f"data/agency/{agency_id}.json")
if not agency_exists:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,

View file

@ -0,0 +1,55 @@
import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Region
from amarillo.services.regions import RegionService
from amarillo.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/region",
tags=["region"]
)
@router.get("/",
            operation_id="getRegions",
            summary="Return all regions",
            response_model=List[Region],
            responses={
            },
            )
async def get_regions() -> List[Region]:
    """List every configured region."""
    region_service: RegionService = container['regions']
    return [region for region in region_service.regions.values()]
@router.get("/{region_id}",
            operation_id="getRegionById",
            summary="Find region by ID",
            response_model=Region,
            description="Find region by ID",
            responses={
                status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
            },
            )
async def get_region(region_id: str) -> Region:
    """Look up a single region; a 404 is raised for unknown ids."""
    found = _assert_region_exists(region_id)
    logger.info(f"Get region {region_id}.")
    return found
def _assert_region_exists(region_id: str) -> Region:
    """Return the region for region_id, raising 404 if it is not configured."""
    region_service: RegionService = container['regions']
    region = region_service.get_region(region_id)
    if region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return region

71
amarillo/routers/users.py Normal file
View file

@ -0,0 +1,71 @@
import logging
from typing import List
from fastapi import APIRouter, HTTPException, status, Header, Depends
from amarillo.models.User import User
from amarillo.services.users import UserService
from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.services.config import config
from amarillo.utils.container import container
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/users",
tags=["users"]
)
# This endpoint is not shown in PROD installations, only in development
# TODO make this an explicit config option
include_in_schema = config.env != 'PROD'
@router.get("/",
            include_in_schema=include_in_schema,
            operation_id="getUserIdsWhichHaveAConfiguration",
            summary="Get user which have a configuration",
            response_model=List[str],
            description="Returns the user_ids but not the details.",
            status_code=status.HTTP_200_OK)
async def get_user_ids(requesting_user: User = Depends(get_current_user)) -> List[str]:
    """Return the ids of all users that have a stored configuration.

    Return annotation fixed: it was the list literal `[str]`, which is not a
    valid type annotation; the declared response model is List[str].
    """
    return container['users'].get_user_ids()
@router.post("/",
             include_in_schema=include_in_schema,
             operation_id="postNewUserConf",
             summary="Post a new User")
async def post_user_conf(user_conf: User, requesting_user: User = Depends(get_current_user)):
    """Store a new user configuration; only admins are permitted to do so."""
    verify_permission("admin", requesting_user)
    service: UserService = container['users']
    service.add(user_conf)
# TODO 400->403
@router.delete("/{user_id}",
               include_in_schema=include_in_schema,
               operation_id="deleteUser",
               status_code=status.HTTP_200_OK,
               summary="Delete configuration of a user. Returns true if the token for the user existed, "
                       "false if it didn't exist."
               )
async def delete_user(user_id: str, requesting_user: User = Depends(get_current_user)):
    """Delete a user configuration.

    A user may delete their own configuration; admins may delete any.
    Raises 400 when not permitted or when no configuration exists
    (see TODO above: these should arguably be 403/404).
    """
    user_may_delete_own = requesting_user.user_id == user_id
    admin_may_delete_everything = "admin" in requesting_user.permissions
    is_permitted = user_may_delete_own or admin_may_delete_everything
    if not is_permitted:
        # Fixed the unbalanced quote around the requesting user's id.
        message = f"User '{requesting_user.user_id}' can not delete the configuration for {user_id}"
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

    user_service: UserService = container['users']

    # Renamed from agency_exists (leftover of the AgencyConf -> User rename).
    user_exists = user_id in user_service.get_user_ids()
    if not user_exists:
        message = f"No config for {user_id}"
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

    user_service.delete(user_id)

View file

@ -2,7 +2,7 @@ import json
from glob import glob
from typing import Dict
from amarillo.app.models.Carpool import Agency
from amarillo.models.Carpool import Agency
# TODO FG HB this service should also listen to pyinotify
# because the (updated) agencies are needed in the enhancer
@ -12,8 +12,7 @@ class AgencyService:
def __init__(self):
self.agencies: Dict[str, Agency] = {}
for agency_file_name in glob('conf/agency/*.json'):
for agency_file_name in glob('data/agency/*.json'):
with open(agency_file_name) as agency_file:
dict = json.load(agency_file)
agency = Agency(**dict)

View file

@ -0,0 +1,66 @@
import json
import logging
from datetime import datetime
from typing import Dict
from amarillo.models.Carpool import Carpool
from amarillo.services.trips import TripStore
from amarillo.utils.utils import yesterday, is_older_than_days
logger = logging.getLogger(__name__)
class CarpoolService():
    """In-memory registry of carpool offers, keyed by "<agency_id>:<carpool_id>",
    kept in sync with an associated trip store."""

    # Offers whose last update is older than this are considered outdated.
    MAX_OFFER_AGE_IN_DAYS = 180

    def __init__(self, trip_store):
        self.trip_store = trip_store
        self.carpools: 'Dict[str, Carpool]' = {}

    def is_outdated(self, carpool):
        """
        A carpool offer is outdated, if
        * it's completely in the past (if it's a single date offer).
          As we know the start time but not latest arrival, we deem
          offers starting the day before yesterday as outdated
        * its last update occurred before MAX_OFFER_AGE_IN_DAYS
        """
        runs_once = not isinstance(carpool.departureDate, set)
        if is_older_than_days(carpool.lastUpdated.date(), self.MAX_OFFER_AGE_IN_DAYS):
            return True
        return runs_once and carpool.departureDate < yesterday()

    def purge_outdated_offers(self):
        """
        Iterates over all carpools and deletes those which are outdated
        """
        for key in list(self.carpools):
            offer = self.carpools.get(key)
            if not (offer and self.is_outdated(offer)):
                continue
            logger.info("Purge outdated offer %s", key)
            self.delete(offer.agency, offer.id)
            try:
                from amarillo.plugins.metrics import trips_deleted_counter
                trips_deleted_counter.inc()
            except ImportError:
                # Metrics plugin not installed - counting is optional.
                pass

    def get(self, agency_id: str, carpool_id: str):
        """Return the stored carpool, or None if unknown."""
        return self.carpools.get(f"{agency_id}:{carpool_id}")

    def get_all_ids(self):
        """Return all "<agency>:<id>" keys currently stored."""
        return list(self.carpools)

    def put(self, agency_id: str, carpool_id: str, carpool):
        """Store or replace a carpool and forward it to the trip store."""
        self.carpools[f"{agency_id}:{carpool_id}"] = carpool
        # Outdated trips (which might have been in the store)
        # will be deleted
        if self.is_outdated(carpool):
            logger.info('Deleting outdated carpool %s:%s', agency_id, carpool_id)
            self.delete(agency_id, carpool_id)
        else:
            self.trip_store.put_carpool(carpool)

    def delete(self, agency_id: str, carpool_id: str):
        """Remove the carpool (if stored) and its trip representation."""
        key = f"{agency_id}:{carpool_id}"
        if key in self.carpools:
            del self.carpools[key]
        self.trip_store.delete_carpool(agency_id, carpool_id)

View file

@ -6,6 +6,7 @@ class Config(BaseSettings):
admin_token: str
ride2go_query_data: str
env: str = 'DEV'
graphhopper_base_url: str = 'https://api.mfdz.de/gh'
stop_sources_file: str = 'data/stop_sources.json'
config = Config(_env_file='config', _env_file_encoding='utf-8')

View file

@ -2,10 +2,10 @@ import logging
from typing import List
import requests
from amarillo.app.models.Carpool import Carpool, StopTime
from amarillo.app.services.config import config
from amarillo.models.Carpool import Carpool, StopTime
from amarillo.services.config import config
from amarillo.app.services.secrets import secrets
from amarillo.services.secrets import secrets
import re
logger = logging.getLogger(__name__)

View file

@ -0,0 +1,75 @@
import time
# Mock GTFS-RT TripUpdate for an ADDED trip (a new carpool offer) with two
# stopTimeUpdates: pickup-only at the first stop, dropoff-only at the second.
mock_added = {
    'trip': {
        'tripId': 'mifaz:carpool-update-123',
        'startTime': '07:33:00',
        'startDate': '20220509',
        'scheduleRelationship': 'ADDED',
        'routeId': 'mifaz:carpool-update-123',
        # Extension fields of the trip descriptor.
        '[transit_realtime.trip_descriptor]': {
            'routeUrl' : 'http://myurl',
            'agencyId' : 'mifaz',
            'route_long_name' : 'Angermünde nach Biesenbrow'}
    },
    'stopTimeUpdate': [{
        'stopSequence': 1,
        'arrival': {
            # POSIX timestamp for 2022-05-09 07:33:00 (local time).
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'de:12073:900340108',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'NONE',
                'pickupType': 'COORDINATE_WITH_DRIVER'
            }
        }
    },
    {
        'stopSequence': 2,
        'arrival': {
            # POSIX timestamp for 2022-05-09 08:03:00 (local time).
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'mfdz:Ang001',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'COORDINATE_WITH_DRIVER',
                'pickupType': 'NONE'
            }
        }
    }]
}

# The ADDED trip above wrapped as a GTFS-RT FeedEntity.
mock_trip_updated_added = {
    'id': 'mifaz:carpool-update-123',
    'tripUpdate': mock_added
}

# FeedEntity for a CANCELED (i.e. deleted) trip.
mock_trip_updated_deleted = {
    'id': 'carpool-update-124',
    'tripUpdate': {
        'trip': {
            'tripId': '141',
            'startTime': '17:01:08',
            'startDate': '20220509',
            'scheduleRelationship': 'CANCELED',
            'routeId': '141'
        }
    }
}

165
amarillo/services/oauth2.py Normal file
View file

@ -0,0 +1,165 @@
# OAuth2 authentication based on https://fastapi.tiangolo.com/tutorial/security/oauth2-jwt/#__tabbed_4_2
from datetime import datetime, timedelta, timezone
from typing import Annotated, Optional, Union
import logging
import logging.config
from fastapi import Depends, HTTPException, Header, status, APIRouter
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from jose import JWTError, jwt
from pydantic import BaseModel
from amarillo.models.User import User
from amarillo.services.passwords import verify_password
from amarillo.utils.container import container
from amarillo.services.agencies import AgencyService
from amarillo.services.users import UserService
from amarillo.models.Carpool import Agency
from amarillo.services.secrets import secrets
SECRET_KEY = secrets.secret_key
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 7*24*60
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main")
router = APIRouter()
class Token(BaseModel):
    """OAuth2 token response returned by the /token endpoint."""
    access_token: str
    token_type: str
class TokenData(BaseModel):
    """Payload extracted from a decoded JWT ("sub" claim holds the user id)."""
    user_id: Union[str, None] = None
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
async def verify_optional_api_key(X_API_Key: Optional[str] = Header(None)):
    """Like verify_api_key, but returns None when no X-API-Key header is set.

    Used by get_current_user so an API key is an optional alternative to a
    bearer token.
    """
    # PEP 8: compare to None with "is", never "==".
    if X_API_Key is None:
        return None
    return await verify_api_key(X_API_Key)
def authenticate_user(user_id: str, password: str):
    """Return user_id if the credentials are valid, otherwise False."""
    user_service: UserService = container['users']
    conf = user_service.user_id_to_user_conf.get(user_id)
    if conf is None:
        return False
    return user_id if verify_password(password, conf.password) else False
def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None):
    """Return a signed JWT carrying *data* plus an "exp" expiry claim.

    Falls back to a 15 minute lifetime if no (truthy) expires_delta is given.
    """
    # "or" keeps the original truthiness semantics (timedelta(0) -> default).
    lifetime = expires_delta or timedelta(minutes=15)
    claims = dict(data)
    claims["exp"] = datetime.now(timezone.utc) + lifetime
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
async def get_current_user(token: str = Depends(oauth2_scheme), user_from_api_key: str = Depends(verify_optional_api_key)) -> User:
    """Resolve the current user from a bearer JWT or an X-API-Key header.

    A bearer token takes precedence over the API key; if neither is present,
    a 401 is raised.

    NOTE(review): user_service.get_user() returns None for an unknown id
    (e.g. a user deleted after the token was issued) — presumably downstream
    permission checks fail on a None user; confirm.
    """
    if token:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate OAuth2 credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
        try:
            # jwt.decode raises JWTError on a bad signature or expired "exp".
            payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
            user_id: str = payload.get("sub")
            if user_id is None:
                raise credentials_exception
            token_data = TokenData(user_id=user_id)
        except JWTError:
            raise credentials_exception
        user_id = token_data.user_id
        if user_id is None:
            raise credentials_exception
        user_service : UserService = container['users']
        return user_service.get_user(user_id)
    elif user_from_api_key:
        # verify_optional_api_key already validated the key and mapped it
        # to a user id (or "admin").
        logger.info(f"API Key provided: {user_from_api_key}")
        user_service : UserService = container['users']
        return user_service.get_user(user_from_api_key)
    else:
        credentials_exception = HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"},
        )
        raise credentials_exception
def verify_permission(permission: str, user: User):
    """Raise a 401 HTTPException unless *user* holds *permission*.

    A permission is either a plain operation ("gtfs") or an
    "agency:operation" pair. In a user's pair permissions, "all" acts as a
    wildcard for the agency and/or the operation, and the "admin"
    permission grants everything.
    """
    def permissions_exception():
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=f"User '{user.user_id}' does not have the permission '{permission}'",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Admins may do anything.
    if "admin" in user.permissions:
        return

    if ":" not in permission:
        # Plain operation: must be listed verbatim.
        if permission in user.permissions:
            return
        raise permissions_exception()

    # "agency:operation" pair: match against the user's pair permissions.
    wanted_agency, wanted_operation = permission.split(":")
    for granted in user.permissions:
        if ":" not in granted:
            continue
        granted_agency, granted_operation = granted.split(":")
        agency_ok = granted_agency in (wanted_agency, "all")
        operation_ok = granted_operation in (wanted_operation, "all")
        if agency_ok and operation_ok:
            return
    raise permissions_exception()
# noinspection PyPep8Naming
# X_API_Key is upper case for OpenAPI
async def verify_api_key(X_API_Key: str = Header(...)):
    """Resolve the mandatory X-API-Key header to a user id.

    Delegates validation to UserService.check_api_key, which raises for
    unknown keys.
    """
    user_service: UserService = container['users']
    resolved_user_id = user_service.check_api_key(X_API_Key)
    return resolved_user_id
@router.post("/token")
async def login_for_access_token(
form_data: Annotated[OAuth2PasswordRequestForm, Depends()]
) -> Token:
agency = authenticate_user(form_data.username, form_data.password)
if not agency:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = create_access_token(
data={"sub": agency}, expires_delta=access_token_expires
)
return Token(access_token=access_token, token_type="bearer")
# TODO: eventually remove this
@router.get("/users/me/", response_model=Agency)
async def read_users_me(
    current_agency: Annotated[Agency, Depends(get_current_user)]
):
    """Return the Agency record of the authenticated caller.

    NOTE(review): get_current_user actually returns a User, which is passed
    here as agency_id — this seems to rely on the user id doubling as the
    agency id; confirm against AgencyService.get_agency.
    """
    agency_service : AgencyService = container['agencies']
    return agency_service.get_agency(agency_id=current_agency)
# TODO: eventually remove this
@router.get("/users/me/items/")
async def read_own_items(
    current_agency: Annotated[str, Depends(get_current_user)]
):
    """Demo endpoint from the FastAPI OAuth2 tutorial; returns a dummy item."""
    return [{"item_id": "Foo", "owner": current_agency}]

View file

@ -0,0 +1,10 @@
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password: str) -> str:
    """Return the bcrypt hash (including salt) of a plaintext password."""
    return pwd_context.hash(password)

View file

@ -2,15 +2,14 @@ import json
from glob import glob
from typing import Dict
from amarillo.app.models.Carpool import Region
from amarillo.models.Carpool import Region
class RegionService:
def __init__(self):
self.regions: Dict[str, Region] = {}
for region_file_name in glob('conf/region/*.json'):
for region_file_name in glob('data/region/*.json'):
with open(region_file_name) as region_file:
dict = json.load(region_file)
region = Region(**dict)

View file

@ -1,12 +1,10 @@
from typing import Dict
from pydantic import Field
from pydantic import Field, ConfigDict
from pydantic_settings import BaseSettings
# Example: secrets = { "mfdz": "some secret" }
class Secrets(BaseSettings):
model_config = ConfigDict(extra='allow')
ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN')
metrics_user: str = Field(None, env = 'METRICS_USER')
metrics_password: str = Field(None, env = 'METRICS_PASSWORD')
secret_key: str = Field(None, env = 'SECRET_KEY')
# Read if file exists, otherwise no error (it's in .gitignore)

115
amarillo/services/users.py Normal file
View file

@ -0,0 +1,115 @@
import json
import os
from glob import glob
from typing import Dict, List
import logging
from fastapi import HTTPException, status
from amarillo.models.User import User
from amarillo.services.config import config
from amarillo.services.passwords import get_password_hash
logger = logging.getLogger(__name__)
user_conf_directory = 'data/users'
class UserService:
    """Manages user configurations stored as JSON files in data/users.

    Both dicts are kept in sync always. The second, api_key_to_user_id, is a
    reverse cache of the first for fast lookup of valid api keys, which
    happens on *every* request.
    """

    def __init__(self):
        self.user_id_to_user_conf: Dict[str, User] = {}
        self.api_key_to_user_id: Dict[str, str] = {}

        for user_conf_file_name in glob(f'{user_conf_directory}/*.json'):
            with open(user_conf_file_name) as user_conf_file:
                dictionary = json.load(user_conf_file)
                user_conf = User(**dictionary)

                user_id = user_conf.user_id
                api_key = user_conf.api_key

                self.user_id_to_user_conf[user_id] = user_conf
                # api_key is optional: only users with a key enter the cache.
                if api_key is not None:
                    self.api_key_to_user_id[api_key] = user_id

    def get_user(self, user_id: str) -> User:
        """Return the User for user_id, or None if unknown."""
        return self.user_id_to_user_conf.get(user_id)

    def check_api_key(self, api_key: str) -> str:
        """Check if the API key is valid.

        The agencies' api keys are checked first, and the admin's key.
        The agency_id or "admin" is returned for further checks in the caller
        if the request is permitted, like {agency_id} == agency_id.

        Raises HTTPException(400) for unknown keys.
        """
        agency_id = self.api_key_to_user_id.get(api_key)
        if agency_id is not None:
            return agency_id

        if api_key == config.admin_token:
            return "admin"

        message = "X-API-Key header invalid"
        logger.error(message)
        raise HTTPException(status_code=400, detail=message)

    def add(self, user_conf: User):
        """Persist a new user and register it in both lookup dicts.

        Raises HTTPException(400) if the user id exists already or its
        api_key is already used by a different user.
        """
        user_id = user_conf.user_id
        api_key = user_conf.api_key

        if self.user_id_to_user_conf.get(user_id) is not None:
            message = f"Agency {user_id} exists already. To update, delete it first."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        user_of_this_api_key = self.api_key_to_user_id.get(api_key) if api_key is not None else None
        if user_of_this_api_key is not None and user_of_this_api_key != user_id:
            message = f"Duplicate API Key for {user_id} not permitted. Use a different key."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

        # password is optional; hash only if one was provided.
        if user_conf.password is not None:
            user_conf.password = get_password_hash(user_conf.password)

        with open(f'{user_conf_directory}/{user_id}.json', 'w', encoding='utf-8') as f:
            f.write(user_conf.json())

        self.user_id_to_user_conf[user_id] = user_conf
        # Keep the reverse cache consistent with __init__: never a None key.
        if api_key is not None:
            self.api_key_to_user_id[api_key] = user_id

        logger.info(f"Added configuration for user {user_id}.")

    def get_user_ids(self) -> List[str]:
        """Return the ids of all known users."""
        return list(self.user_id_to_user_conf.keys())

    def delete(self, user_id):
        """Remove a user from both dicts and delete its JSON file.

        Raises HTTPException(404) for unknown user ids. (Previously an
        unknown id crashed with AttributeError, and a user without an
        api_key crashed with KeyError, since None keys are never cached.)
        """
        user_conf = self.user_id_to_user_conf.get(user_id)
        if user_conf is None:
            message = f"User {user_id} does not exist."
            logger.error(message)
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)

        if user_conf.api_key is not None:
            self.api_key_to_user_id.pop(user_conf.api_key, None)
        del self.user_id_to_user_conf[user_id]
        os.remove(f'{user_conf_directory}/{user_id}.json')
        logger.info(f"Deleted configuration for {user_id}.")

View file

@ -0,0 +1,5 @@
[
{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/stops_zhv.csv", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/parkings_osm.csv", "vicinity": 500}
]

View file

@ -1,3 +1,5 @@
# Bounding-Box Germany
ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }'
env = 'PROD'
graphhopper_base_url = 'https://api.mfdz.de/gh'
stop_sources_file = 'conf/stop_sources.json'

View file

@ -2,14 +2,14 @@
keys=root
[handlers]
keys=consoleHandler
keys=consoleHandler, fileHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=INFO
handlers=consoleHandler
handlers=consoleHandler, fileHandler
propagate=yes
[handler_consoleHandler]
@ -18,5 +18,11 @@ level=DEBUG
formatter=simpleFormatter
args=(sys.stdout,)
[handler_fileHandler]
class=handlers.RotatingFileHandler
level=ERROR
formatter=simpleFormatter
args=('error.log', 'a', 1000000, 3) # Filename, mode, maxBytes, backupCount
[formatter_simpleFormatter]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s

View file

@ -0,0 +1,38 @@
from fastapi.testclient import TestClient
from amarillo.main import app
from amarillo.tests.sampledata import carpool_1234, data1
client = TestClient(app)
# TODO FG: This test needs a clean temporary storage folder, not the hard coded data dir.
def test_doc():
    """The OpenAPI schema endpoint must be reachable."""
    schema_response = client.get("/openapi.json")
    assert schema_response.status_code == 200
def test_get_mfdz_0():
    """GET for a non-existing carpool yields 404 with a helpful detail."""
    resp = client.get("/carpool/mfdz/0")
    assert resp.status_code == 404
    assert resp.json() == {"detail": "Carpool with agency mfdz and id 0 does not exist."}
def test_delete_mfdz_0():
    """DELETE for a non-existing carpool yields 404 with a helpful detail."""
    resp = client.delete("/carpool/mfdz/0")
    assert resp.status_code == 404
    assert resp.json() == {"detail": "Carpool with id 0 does not exist."}
def test_post():
    """Create, fetch and delete a carpool; the second delete must 404."""
    carpool_url = f"/carpool/mfdz/{data1['id']}"

    resp = client.get(carpool_url)
    assert resp.status_code == 404, "The carpool should not exist yet"

    resp = client.post("/carpool/", json=data1)
    assert resp.status_code == 200, "The first post must work with 200"

    resp = client.get(carpool_url)
    assert resp.status_code == 200, "After post, the get must work"

    resp = client.delete(carpool_url)
    assert resp.status_code == 200, "The first delete must work with 200"

    resp = client.delete(carpool_url)
    assert resp.status_code == 404, "The second delete must fail"

View file

@ -1,4 +1,4 @@
from amarillo.app.models.Carpool import Carpool, StopTime, Weekday
from amarillo.models.Carpool import Carpool, StopTime, Weekday
# TODO use meaningful values for id and lat, lon
stops_1234 = [

View file

@ -0,0 +1,37 @@
from fastapi import HTTPException
import pytest
from amarillo.services.oauth2 import verify_permission
from amarillo.models.User import User
test_user = User(user_id="test", password="testpassword", permissions=["all:read", "mfdz:write", "ride2go:all", "gtfs"])
admin_user = User(user_id="admin", password="testpassword", permissions=["admin"])
def test_operation():
    """Plain (non-agency) operation permissions must match verbatim."""
    verify_permission("gtfs", test_user)
    with pytest.raises(HTTPException):
        verify_permission("geojson", test_user)
def test_agency_permission():
    """Agency:operation permissions, with "all" wildcards on the user side."""
    verify_permission("mvv:read", test_user)
    verify_permission("mfdz:read", test_user)
    verify_permission("mfdz:write", test_user)
    verify_permission("ride2go:write", test_user)
    # Each denied permission needs its own pytest.raises block: statements
    # after the first raising call inside one block never execute, so the
    # second check was previously dead code.
    with pytest.raises(HTTPException):
        verify_permission("mvv:write", test_user)
    with pytest.raises(HTTPException):
        verify_permission("mvv:all", test_user)
def test_admin():
    """The "admin" permission grants everything; non-admins are denied it."""
    verify_permission("admin", admin_user)
    verify_permission("gtfs", admin_user)
    verify_permission("all:all", admin_user)
    verify_permission("mvv:all", admin_user)
    verify_permission("mfdz:read", admin_user)
    verify_permission("mfdz:write", admin_user)
    verify_permission("ride2go:write", admin_user)
    # Split into one pytest.raises block per denied call: the second call
    # in a shared block would never run after the first one raises.
    with pytest.raises(HTTPException):
        verify_permission("admin", test_user)
    with pytest.raises(HTTPException):
        verify_permission("all:all", test_user)

View file

@ -1,6 +1,5 @@
import os
import re
import os
import shutil
from pathlib import Path
import logging
@ -50,7 +49,7 @@ def geodesic_distance_in_m(coord1, coord2):
def copy_static_files(files_and_dirs_to_copy):
amarillo_dir = Path(__file__).parents[2]
amarillo_dir = Path(__file__).parents[1]
source_dir = os.path.join(amarillo_dir, "static")
destination_dir = os.getcwd()
@ -59,6 +58,9 @@ def copy_static_files(files_and_dirs_to_copy):
source_path = os.path.join(source_dir, item)
destination_path = os.path.join(destination_dir, item)
if not os.path.exists(source_path):
raise FileNotFoundError(source_path)
if os.path.exists(destination_path):
# logger.info(f"{item} already exists")
continue

View file

@ -1,17 +1,24 @@
[project]
name = "amarillo-core"
version = "0.0.14"
name = "amarillo"
version = "0.0.15a4"
description = "Aggregates and enhances carpooling-offers and publishes them as GTFS(-RT)"
readme = "README.md"
license = {file = "LICENSE"}
keywords = ["amarillo", "ridesharing", "carpooling", "gtfs", "gtfs-rt"]
dependencies = [
"fastapi[all]==0.104.0",
"fastapi[all]==0.109.0",
"geopandas==0.14",
"uvicorn[standard]==0.23.2",
"pydantic[dotenv]==2.4.2",
"protobuf==3.20.3",
"starlette",
"starlette~=0.35",
"requests==2.31.0",
"pyproj==3.6.1",
"geojson-pydantic==1.0.1",
"pytest",
"watchdog==3.0.0",
"python-jose[cryptography]",
"bcrypt==4.0.1",
"passlib[bcrypt]"
]
[tool.setuptools.packages]

View file

@ -1,10 +1,13 @@
fastapi[all]==0.104.0
fastapi[all]==0.109.0
geopandas==0.14
uvicorn[standard]==0.23.2
pydantic[dotenv]==2.4.2
protobuf==3.20.3
starlette
starlette~=0.35
requests==2.31.0
pyproj==3.6.1
geojson-pydantic==1.0.1
pytest
pytest
python-jose[cryptography]
bcrypt==4.0.1
passlib[bcrypt]