rename /app to /amarillo

This commit is contained in:
Csaba 2024-02-13 10:47:03 +01:00
parent ddca809530
commit 3de2621b90
57 changed files with 345 additions and 896 deletions

View file

@ -30,13 +30,13 @@ RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN --mount=type=secret,id=AMARILLO_REGISTRY_CREDENTIALS \ RUN --mount=type=secret,id=AMARILLO_REGISTRY_CREDENTIALS \
pip install --no-cache-dir --upgrade --extra-index-url https://$(cat /run/secrets/AMARILLO_REGISTRY_CREDENTIALS)@${PACKAGE_REGISTRY_URL} ${PLUGINS} pip install --no-cache-dir --upgrade --extra-index-url https://$(cat /run/secrets/AMARILLO_REGISTRY_CREDENTIALS)@${PACKAGE_REGISTRY_URL} ${PLUGINS}
COPY ./amarillo/app /app/amarillo/app COPY ./amarillo /app/amarillo
COPY ./amarillo/plugins /app/amarillo/plugins COPY ./amarillo/plugins /app/amarillo/plugins
COPY ./amarillo/static/static /app/static COPY ./amarillo/static/static /app/static
COPY ./amarillo/static/templates /app/templates COPY ./amarillo/static/templates /app/templates
COPY ./amarillo/static/config /app COPY ./amarillo/static/config /app
COPY ./amarillo/static/logging.conf /app COPY ./amarillo/static/logging.conf /app
COPY ./conf /app/conf COPY ./amarillo/static/conf /app/conf
# This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead # This image inherits uvicorn-gunicorn's CMD. If you'd like to start uvicorn, use this instead
# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] # CMD ["uvicorn", "amarillo.main:app", "--host", "0.0.0.0", "--port", "8000"]

View file

@ -1,2 +1,2 @@
recursive-include amarillo/static/ * recursive-include amarillo/static/ *
recursive-include amarillo/app/tests/ * recursive-include amarillo/tests/ *

View file

@ -9,9 +9,11 @@ An Amarillo is a [yellow-dressed person](https://www.dreamstime.com/sancti-spiri
- Python 3.9.2 with pip - Python 3.9.2 with pip
- python3-venv - python3-venv
Create a virtual environment `python3 -m venv venv`. Activate the environment with `source venv/bin/activate` and install the dependencies `pip install -r requirements.txt`. Create a virtual environment `python3 -m venv venv`.
Run `uvicorn amarillo.app.main:app`. Activate the environment and install the dependencies `pip install -r requirements.txt`.
Run `uvicorn amarillo.main:app`.
In development, you can use `--reload`. In development, you can use `--reload`.
@ -20,8 +22,6 @@ In development, you can use `--reload`.
- `env` - `env`
- `ADMIN_TOKEN` - `ADMIN_TOKEN`
E.g. set the environment variable like this: `export ADMIN_TOKEN=YOUR_SECRET_TOKEN_HERE`.
## Security ## Security
All endpoints are protected by an API-Key in the HTTP header. All endpoints are protected by an API-Key in the HTTP header.
@ -44,23 +44,23 @@ Permissions work this way
### GTFS-RT python bindings ### GTFS-RT python bindings
In case you modify or update the proto-files in app/proto, you'll need to regenerate the python bindings. First, create the python files: In case you modify or update the proto-files in amarillo/proto, you'll need to regenerate the python bindings. First, create the python files:
```sh ```sh
$ cd app/proto $ cd amarillo/proto
$ protoc --version $ protoc --version
libprotoc 3.21.6 libprotoc 3.21.6
$ protoc --proto_path=. --python_out=../services/gtfsrt gtfs-realtime.proto realtime_extension.proto $ protoc --proto_path=. --python_out=../services/gtfsrt gtfs-realtime.proto realtime_extension.proto
$ sed 's/import gtfs_realtime_pb2/import app.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py $ sed 's/import gtfs_realtime_pb2/import amarillo.services.gtfsrt.gtfs_realtime_pb2/g' ../services/gtfsrt/realtime_extension_pb2.py | sponge ../services/gtfsrt/realtime_extension_pb2.py
``` ```
## Testing ## Testing
In the top directory, run `pytest app/tests`. In the top directory, run `pytest amarillo/tests`.
## Docker ## Docker
Based on [tiangolo/uvicorn-gunicorn:python3.9-slim](https://github.com/tiangolo/uvicorn-gunicorn-docker) Based on [tiangolo/uvicorn-gunicorn:python3.9-slim](https://github.com/tiangolo/uvicorn-gunicorn-docker)
- build `docker build -t amarillo -t latest --build-arg='PACKAGE_REGISTRY_URL=$PACKAGE_REGISTRY_URL' --build-arg="PLUGINS=amarillo-metrics amarillo-enhancer" --secret id=AMARILLO_REGISTRY_CREDENTIALS,src=credentials . ` - build `docker build -t amarillo --build-arg="PLUGINS=amarillo-metrics" .`
- run `docker run --rm --name amarillo -p 8000:80 -e MODULE_NAME=amarillo.app.main -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e METRICS_USER=$METRICS_USER -e METRICS_PASSWORD=$METRICS_PASSWORD -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo` - run `docker run --rm --name amarillo -p 8000:80 -e MAX_WORKERS="1" -e ADMIN_TOKEN=$ADMIN_TOKEN -e RIDE2GO_TOKEN=$RIDE2GO_TOKEN -e TZ=Europe/Berlin -v $(pwd)/data:/app/data amarillo`

1
amarillo/__init__.py Normal file
View file

@ -0,0 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

View file

@ -1,32 +0,0 @@
from collections import namedtuple
from datetime import timedelta

# Lightweight records, each mirroring one row of the corresponding GTFS file
# (feed_info.txt, agency.txt, routes.txt, stops.txt, stop_times.txt, trips.txt,
# calendar.txt, calendar_dates.txt, shapes.txt). Field names follow the GTFS
# column names so rows can be written out via csv writers directly.
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_short_name')
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id driver_id service_id shape_id trip_headsign bikes_allowed trip_url')
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
# Non-standard extension records used by this project (driver profile and
# additional ridesharing attributes attached to a trip).
GtfsDriver = namedtuple('GtfsDriver','driver_id profile_picture rating')
GtfsAdditionalRidesharingInfo = namedtuple('GtfsAdditionalRidesharingInfo','trip_id number_free_seats same_gender luggage_size animal_car car_model car_brand creation_date smoking payment_method')
# TODO Move to utils
class GtfsTimeDelta(timedelta):
    """timedelta whose str() is the GTFS HH:MM:SS time format.

    GTFS allows hour values >= 24 (for trips running past midnight), so the
    total number of hours is rendered instead of days + hours.
    """

    def __str__(self):
        # divmod instead of manual // and %; also fixes the original local
        # variable named `str`, which shadowed the builtin.
        total_seconds = int(self.total_seconds())
        hours, remainder = divmod(total_seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)

    def __add__(self, other):
        # timedelta.__add__ would return a plain timedelta; rebuild via
        # self.__class__ so sums keep the GTFS string formatting.
        if isinstance(other, timedelta):
            return self.__class__(self.days + other.days,
                                  self.seconds + other.seconds,
                                  self.microseconds + other.microseconds)
        return NotImplemented

View file

@ -1,88 +0,0 @@
import logging
import time
from typing import List

from fastapi import APIRouter, HTTPException, status, Depends
from fastapi.responses import FileResponse

from amarillo.app.models.Carpool import Region
from amarillo.app.routers.agencyconf import verify_admin_api_key, verify_api_key
from amarillo.app.services.regions import RegionService
from amarillo.app.utils.container import container

logger = logging.getLogger(__name__)

router = APIRouter(
    prefix="/region",
    tags=["region"]
)


@router.get("/",
            operation_id="getRegions",
            summary="Return all regions",
            response_model=List[Region],
            responses={
            },
            )
async def get_regions() -> List[Region]:
    """Return every region known to the RegionService."""
    service: RegionService = container['regions']
    return list(service.regions.values())


@router.get("/{region_id}",
            operation_id="getRegionById",
            summary="Find region by ID",
            response_model=Region,
            description="Find region by ID",
            responses={
                status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
            },
            )
async def get_region(region_id: str) -> Region:
    """Return the region with the given id, or HTTP 404 if unknown."""
    region = _assert_region_exists(region_id)
    logger.info(f"Get region {region_id}.")
    return region


def _assert_region_exists(region_id: str) -> Region:
    """Return the region with the given id, raising HTTPException 404 if it does not exist."""
    regions: RegionService = container['regions']
    region = regions.get_region(region_id)
    if region is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return region


# NOTE: the two download endpoints below were both named `get_file`, so the
# second definition rebound the module attribute and FastAPI derived duplicate
# operation ids. They now have distinct names; the HTTP routes are unchanged.
@router.get("/{region_id}/gtfs",
            summary="Return GTFS Feed for this region",
            response_description="GTFS-Feed (zip-file)",
            response_class=FileResponse,
            responses={
                status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
            }
            )
async def get_gtfs(region_id: str, user: str = Depends(verify_api_key)):
    """Serve the pre-generated GTFS zip for the region (requires API key)."""
    _assert_region_exists(region_id)
    return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')


@router.get("/{region_id}/gtfs-rt",
            summary="Return GTFS-RT Feed for this region",
            response_description="GTFS-RT-Feed",
            response_class=FileResponse,
            responses={
                status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
                status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
            }
            )
async def get_gtfs_rt(region_id: str, format: str = 'protobuf', user: str = Depends(verify_api_key)):
    """Serve the pre-generated GTFS-RT feed, as protobuf (default) or json."""
    _assert_region_exists(region_id)
    if format == 'json':
        return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
    elif format == 'protobuf':
        return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
    else:
        message = "Specified format is not supported, i.e. neither protobuf nor json."
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

View file

@ -1,137 +0,0 @@
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs_realtime_pb2
import amarillo.app.services.gtfsrt.realtime_extension_pb2 as mfdzrte
from amarillo.app.services.gtfs_constants import *
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict
from datetime import datetime, timedelta
import json
import re
import time
class GtfsRtProducer():
    """Produces GTFS-RT TripUpdate feeds from the carpool trips in a trip store.

    Recently added trips are published as ADDED TripUpdates and recently
    deleted ones as CANCELED, one update per upcoming service date.
    """

    def __init__(self, trip_store):
        # trip_store must provide recently_added_trips() and
        # recently_deleted_trips() (see _get_added / _get_deleted).
        self.trip_store = trip_store

    def generate_feed(self, time, format='protobuf', bbox=None):
        """Build the GTFS-RT feed with header timestamp `time` (POSIX seconds).

        format: 'message' returns the protobuf FeedMessage object, 'json'
        returns a dict, any other value returns serialized protobuf bytes.
        bbox, if given, restricts updates to trips intersecting it.
        NOTE(review): the `time` parameter shadows the imported time module
        within this method.
        """
        # See https://developers.google.com/transit/gtfs-realtime/reference
        # https://github.com/mfdz/carpool-gtfs-rt/blob/master/src/main/java/de/mfdz/resource/CarpoolResource.java
        gtfsrt_dict = {
            'header': {
                'gtfsRealtimeVersion': '1.0',
                'timestamp': int(time)
            },
            'entity': self._get_trip_updates(bbox)
        }
        feed = gtfs_realtime_pb2.FeedMessage()
        ParseDict(gtfsrt_dict, feed)
        if "message" == format.lower():
            return feed
        elif "json" == format.lower():
            return MessageToDict(feed)
        else:
            return feed.SerializeToString()

    def export_feed(self, timestamp, file_path, bbox=None):
        """
        Exports gtfs-rt feed as .json and .pbf file to file_path
        """
        feed = self.generate_feed(timestamp, "message", bbox)
        with open(f"{file_path}.pbf", "wb") as f:
            f.write(feed.SerializeToString())
        with open(f"{file_path}.json", "w") as f:
            json.dump(MessageToDict(feed), f)

    def _get_trip_updates(self, bbox = None):
        # Wrap every added/deleted update into a FeedEntity; entity ids only
        # need to be unique within this feed, so a running counter suffices.
        trips = []
        trips.extend(self._get_added(bbox))
        trips.extend(self._get_deleted(bbox))
        trip_updates = []
        for num, trip in enumerate(trips):
            trip_updates.append( {
                'id': f'carpool-update-{num}',
                'tripUpdate': trip
                }
            )
        return trip_updates

    def _get_deleted(self, bbox = None):
        # CANCELED updates for trips that were recently deleted from the store.
        return self._get_updates(
            self.trip_store.recently_deleted_trips(),
            self._as_delete_updates,
            bbox)

    def _get_added(self, bbox = None):
        # ADDED updates for trips that were recently added to the store.
        return self._get_updates(
            self.trip_store.recently_added_trips(),
            self._as_added_updates,
            bbox)

    def _get_updates(self, trips, update_func, bbox = None):
        # Apply update_func(trip, today) to every trip, optionally filtered
        # to those intersecting bbox.
        updates = []
        today = datetime.today()
        for t in trips:
            if bbox == None or t.intersects(bbox):
                updates.extend(update_func(t, today))
        return updates

    def _as_delete_updates(self, trip, fromdate):
        # One CANCELED trip descriptor per upcoming service date of the trip.
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'CANCELED',
                'routeId': trip.trip_id
            }
        } for trip_date in trip.next_trip_dates(fromdate)]

    def _to_seconds(self, fromdate, stop_time):
        # Convert a GTFS HH:MM:SS stop time on fromdate (YYYYMMDD string) to
        # a POSIX timestamp; a timedelta is used because GTFS hours may
        # exceed 24 for trips running past midnight.
        startdate = datetime.strptime(fromdate, '%Y%m%d')
        m = re.search(r'(\d+):(\d+):(\d+)', stop_time)
        delta = timedelta(
            hours=int(m.group(1)),
            minutes=int(m.group(2)),
            seconds=int(m.group(3)))
        return time.mktime((startdate + delta).timetuple())

    def _to_stop_times(self, trip, fromdate):
        # One StopTimeUpdate per stop of the trip, carrying the MFDZ
        # pickup/dropoff extension properties.
        return [{
            'stopSequence': stoptime.stop_sequence,
            'arrival': {
                'time': self._to_seconds(fromdate, stoptime.arrival_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'departure': {
                'time': self._to_seconds(fromdate, stoptime.departure_time),
                'uncertainty': MFDZ_DEFAULT_UNCERTAINITY
            },
            'stopId': stoptime.stop_id,
            'scheduleRelationship': 'SCHEDULED',
            'stop_time_properties': {
                '[transit_realtime.stop_time_properties]': {
                    'dropoffType': 'COORDINATE_WITH_DRIVER' if stoptime.drop_off_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE',
                    'pickupType': 'COORDINATE_WITH_DRIVER' if stoptime.pickup_type == STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER else 'NONE'
                }
            }
        }
        for stoptime in trip.stop_times]

    def _as_added_updates(self, trip, fromdate):
        # One ADDED trip (including its stop time updates) per upcoming
        # service date of the trip.
        return [{
            'trip': {
                'tripId': trip.trip_id,
                'startTime': trip.start_time_str(),
                'startDate': trip_date,
                'scheduleRelationship': 'ADDED',
                'routeId': trip.trip_id,
                '[transit_realtime.trip_descriptor]': {
                    'routeUrl' : trip.url,
                    'agencyId' : trip.agency,
                    'route_long_name' : trip.route_long_name(),
                    'route_type': RIDESHARING_ROUTE_TYPE
                }
            },
            'stopTimeUpdate': self._to_stop_times(trip, trip_date)
        } for trip_date in trip.next_trip_dates(fromdate)]

View file

@ -1,14 +0,0 @@
# Constants
# trips.txt bikes_allowed value meaning "no bikes allowed".
NO_BIKES_ALLOWED = 2
# Extended GTFS route type used for ridesharing/carpool routes.
RIDESHARING_ROUTE_TYPE = 1551
# calendar_dates.txt exception_type values: service added / removed on a date.
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
# stop_times.txt pickup_type / drop_off_type values.
STOP_TIMES_STOP_TYPE_REGULARLY = 0
STOP_TIMES_STOP_TYPE_NONE = 1
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
# stop_times.txt timepoint values: times are approximate / exact.
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
STOP_TIMES_TIMEPOINT_EXACT = 1
# Default uncertainty (seconds) attached to GTFS-RT arrival/departure times.
MFDZ_DEFAULT_UNCERTAINITY = 600

File diff suppressed because one or more lines are too long

View file

@ -1,33 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: realtime_extension.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import amarillo.app.services.gtfsrt.gtfs_realtime_pb2 as gtfs__realtime__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18realtime_extension.proto\x12\x10transit_realtime\x1a\x13gtfs-realtime.proto\"p\n\x1bMfdzTripDescriptorExtension\x12\x11\n\troute_url\x18\x01 \x01(\t\x12\x11\n\tagency_id\x18\x02 \x01(\t\x12\x17\n\x0froute_long_name\x18\x03 \x01(\t\x12\x12\n\nroute_type\x18\x04 \x01(\r\"\xb0\x02\n\x1fMfdzStopTimePropertiesExtension\x12X\n\x0bpickup_type\x18\x01 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\x12Y\n\x0c\x64ropoff_type\x18\x02 \x01(\x0e\x32\x43.transit_realtime.MfdzStopTimePropertiesExtension.DropOffPickupType\"X\n\x11\x44ropOffPickupType\x12\x0b\n\x07REGULAR\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x10\n\x0cPHONE_AGENCY\x10\x02\x12\x1a\n\x16\x43OORDINATE_WITH_DRIVER\x10\x03:i\n\x0ftrip_descriptor\x12 .transit_realtime.TripDescriptor\x18\xf5\x07 \x01(\x0b\x32-.transit_realtime.MfdzTripDescriptorExtension:\x90\x01\n\x14stop_time_properties\x12>.transit_realtime.TripUpdate.StopTimeUpdate.StopTimeProperties\x18\xf5\x07 \x01(\x0b\x32\x31.transit_realtime.MfdzStopTimePropertiesExtensionB\t\n\x07\x64\x65.mfdz')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'realtime_extension_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
gtfs__realtime__pb2.TripDescriptor.RegisterExtension(trip_descriptor)
gtfs__realtime__pb2.TripUpdate.StopTimeUpdate.StopTimeProperties.RegisterExtension(stop_time_properties)
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\007de.mfdz'
_MFDZTRIPDESCRIPTOREXTENSION._serialized_start=67
_MFDZTRIPDESCRIPTOREXTENSION._serialized_end=179
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_start=182
_MFDZSTOPTIMEPROPERTIESEXTENSION._serialized_end=486
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_start=398
_MFDZSTOPTIMEPROPERTIESEXTENSION_DROPOFFPICKUPTYPE._serialized_end=486
# @@protoc_insertion_point(module_scope)

View file

@ -1,47 +0,0 @@
import requests
import logging
logger = logging.getLogger(__name__)
class RoutingException(Exception):
    """Raised when the routing service request fails or returns an error."""

    def __init__(self, message):
        # Call Exception.__init__(message)
        # to use the same Message header as the parent class
        super().__init__(message)
class RoutingService():
    """Thin client for a GraphHopper routing endpoint."""

    def __init__(self, gh_url='https://api.mfdz.de/gh'):
        # Base URL of the GraphHopper service, without a trailing slash.
        self.gh_service_url = gh_url

    def path_for_stops(self, points):
        """Return the first GraphHopper path traversing the given points.

        points are shapely-like objects with x (lon) and y (lat) attributes.
        Returns {} if no path was found or the response had no 'paths' key
        (the original crashed on a missing key).
        """
        directions = self._get_directions(points)
        if directions and directions.get("paths"):
            return directions["paths"][0]
        return {}

    def _get_directions(self, points):
        """GET the route from GraphHopper; raise RoutingException on non-200."""
        req_url = self._create_url(points, True, True)
        logger.debug("Get directions via: %s", req_url)
        response = requests.get(req_url)
        status = response.status_code
        if status == 200:
            # Found route between points
            return response.json()
        try:
            message = response.json().get('message')
        except Exception:
            # Error body was not parseable JSON: narrowed from a bare
            # `except:` which also swallowed KeyboardInterrupt/SystemExit.
            raise RoutingException("Get directions failed with status code {}".format(status))
        else:
            raise RoutingException(message)

    def _create_url(self, points, calc_points = False, instructions = False):
        """ Creates GH request URL """
        # GraphHopper expects point=<lat>%2C<lon> pairs; each pair keeps a
        # trailing '&' so the fixed parameters can be appended directly.
        locations = "".join("point={0}%2C{1}&".format(point.y, point.x) for point in points)
        return "{0}/route?{1}instructions={2}&calc_points={3}&points_encoded=false".format(
            self.gh_service_url, locations, instructions, calc_points)

View file

@ -1,182 +0,0 @@
import csv
import geopandas as gpd
import pandas as pd
from amarillo.app.models.Carpool import StopTime
from contextlib import closing
from shapely.geometry import Point, LineString
from shapely.ops import transform
from pyproj import Proj, Transformer
import re
import requests
from io import TextIOWrapper
import codecs
import logging
logger = logging.getLogger(__name__)
class StopsStore():
    """Loads stops (bus stops, carpool parkings, ...) from configured sources
    and matches them against carpool trip geometries.

    Geometries are reprojected from WGS84 lat/lon into a metric CRS
    (default EPSG:32632) so vicinity searches can use distances in meters.
    """

    def __init__(self, stop_sources=None, internal_projection="EPSG:32632"):
        """stop_sources is a list of {"url": ..., "vicinity": meters} dicts.

        Note: the former default of a shared mutable list ([]) was replaced
        by a None sentinel so instances never share one list object.
        """
        self.internal_projection = internal_projection
        # Transform from EPSG:4326 (lon/lat order) into the metric CRS.
        self.projection = Transformer.from_crs("EPSG:4326", internal_projection, always_xy=True).transform
        self.stopsDataFrames = []
        self.stop_sources = [] if stop_sources is None else stop_sources

    def load_stop_sources(self):
        """Imports stops from stop_sources and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m.

        Subsequent calls of load_stop_sources will reload all stop_sources
        but replace the current stops only if all stops could be loaded successfully.
        """
        stopsDataFrames = []
        error_occured = False
        for stops_source in self.stop_sources:
            try:
                stopsDataFrame = self._load_stops(stops_source["url"])
                stopsDataFrames.append({'distanceInMeter': stops_source["vicinity"],
                                        'stops': stopsDataFrame})
            except Exception:
                error_occured = True
                logger.error("Failed to load stops from %s to StopsStore.", stops_source["url"], exc_info=True)

        if not error_occured:
            self.stopsDataFrames = stopsDataFrames

    def find_additional_stops_around(self, line, stops=None):
        """Returns a GeoDataFrame with all stops in vicinity of the
        given line, sorted by distance from origin of the line.

        Note: for internal projection/distance calculations, the
        lat/lon geometries of line and stops are converted to the
        metric internal projection.
        """
        stops_frames = []
        if stops:
            stops_frames.append(self._convert_to_dataframe(stops))
        transformedLine = transform(self.projection, LineString(line.coordinates))
        for stops_to_match in self.stopsDataFrames:
            stops_frames.append(self._find_stops_around_transformed(
                stops_to_match['stops'], transformedLine, stops_to_match['distanceInMeter']))
        stops = gpd.GeoDataFrame(pd.concat(stops_frames, ignore_index=True, sort=True))
        if not stops.empty:
            self._sort_by_distance(stops, transformedLine)
        return stops

    def find_closest_stop(self, carpool_stop, max_search_distance):
        """Return the registered stop closest to carpool_stop within
        max_search_distance meters, or carpool_stop itself if none is found."""
        transformedCoord = Point(self.projection(carpool_stop.lon, carpool_stop.lat))
        best_dist = max_search_distance + 1
        best_stop = None
        for stops_with_dist in self.stopsDataFrames:
            stops = stops_with_dist['stops']
            # sindex.nearest returns (index pairs, distances); s[1][0] is the
            # positional index of the matched stop row in the frame.
            s, d = stops.sindex.nearest(transformedCoord, return_all=True,
                                        return_distance=True, max_distance=max_search_distance)
            if len(d) > 0 and d[0] < best_dist:
                best_dist = d[0]
                row = s[1][0]
                best_stop = StopTime(name=stops.at[row, 'stop_name'], lat=stops.at[row, 'y'], lon=stops.at[row, 'x'])

        return best_stop if best_stop else carpool_stop

    def _normalize_stop_name(self, stop_name):
        # Collapse the various spellings of "Park and Ride" (P+R, Park&Ride,
        # Park + Rail, ...) to 'P+R'; empty or generic names get a default.
        default_name = 'P+R-Parkplatz'
        if stop_name in ('', 'Park&Ride'):
            return default_name
        return re.sub(r"P(ark)?\s?[\+&]\s?R(ail|ide)?", 'P+R', stop_name)

    def _load_stops(self, source: str):
        """Loads stops from given source and registers them with
        the distance they are still associated with a trip.
        E.g. bus stops should be registered with a distance of e.g. 30m,
        while larger carpool parkings might be registered with e.g. 500m

        source is either an http(s) URL (GeoJSON if it ends in 'json',
        otherwise CSV) or a local CSV file path.
        """
        logger.info("Load stops from %s", source)
        if source.startswith('http'):
            if source.endswith('json'):
                with requests.get(source) as json_source:
                    stopsDataFrame = self._load_stops_geojson(json_source.json())
            else:
                with requests.get(source) as csv_source:
                    stopsDataFrame = self._load_stops_csv(codecs.iterdecode(csv_source.iter_lines(), 'utf-8'))
        else:
            with open(source, encoding='utf-8') as csv_source:
                stopsDataFrame = self._load_stops_csv(csv_source)
        return stopsDataFrame

    def _load_stops_csv(self, csv_source):
        # Parse a semicolon-separated CSV with stop_id/stop_lat/stop_lon/
        # stop_name columns; lat/lon accept a decimal comma.
        # (Local lists renamed from the original, which shadowed builtin `id`.)
        ids, lats, lons, names = [], [], [], []
        reader = csv.DictReader(csv_source, delimiter=';')
        columns = ['stop_id', 'stop_lat', 'stop_lon', 'stop_name']
        lists = [ids, lats, lons, names]
        for row in reader:
            for col, lst in zip(columns, lists):
                if col == "stop_lat" or col == "stop_lon":
                    lst.append(float(row[col].replace(",", ".")))
                elif col == "stop_name":
                    lst.append(self._normalize_stop_name(row[col]))
                else:
                    lst.append(row[col])
        return self._as_dataframe(ids, lats, lons, names)

    def _load_stops_geojson(self, geojson_source):
        # Parse a GeoJSON FeatureCollection; features lacking coordinates or
        # a name are skipped (and logged).
        ids, lats, lons, names = [], [], [], []
        for row in geojson_source['features']:
            coord = row['geometry']['coordinates']
            if not coord or not row['properties'].get('name'):
                logger.error('Stop feature {} has null coord or name'.format(row['id']))
                continue
            ids.append(row['id'])
            lats.append(coord[1])
            lons.append(coord[0])
            names.append(self._normalize_stop_name(row['properties']['name']))
        return self._as_dataframe(ids, lats, lons, names)

    def _as_dataframe(self, ids, lat, lon, stop_name):
        # Build a GeoDataFrame in WGS84 and reproject it to the internal CRS.
        df = gpd.GeoDataFrame(data={'x': lon, 'y': lat, 'stop_name': stop_name, 'id': ids})
        stopsGeoDataFrame = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.x, df.y, crs='EPSG:4326'))
        stopsGeoDataFrame.to_crs(crs=self.internal_projection, inplace=True)
        return stopsGeoDataFrame

    def _find_stops_around_transformed(self, stopsDataFrame, transformedLine, distance):
        # Buffer the already-projected line by `distance` meters and return
        # all stops intersecting the buffer. The spatial index pre-filters
        # candidates by bounding box before the exact intersection test.
        bufferedLine = transformedLine.buffer(distance)
        sindex = stopsDataFrame.sindex
        possible_matches_index = list(sindex.intersection(bufferedLine.bounds))
        possible_matches = stopsDataFrame.iloc[possible_matches_index]
        return possible_matches[possible_matches.intersects(bufferedLine)]

    def _convert_to_dataframe(self, stops):
        # Convert StopTime-like objects to the same frame layout as loaded
        # stop sources; geometry is created directly in the internal CRS.
        return gpd.GeoDataFrame(
            [[stop.name, stop.lon, stop.lat, stop.id,
              Point(self.projection(stop.lon, stop.lat))] for stop in stops],
            columns=['stop_name', 'x', 'y', 'id', 'geometry'],
            crs=self.internal_projection)

    def _sort_by_distance(self, stops, transformedLine):
        # Sort stops in place by their projected position along the line.
        stops['distance'] = stops.apply(lambda row: transformedLine.project(row['geometry']), axis=1)
        stops.sort_values('distance', inplace=True)
def is_carpooling_stop(stop_id, name):
    """Return True if this stop should be treated as a carpooling stop.

    mfdz:/bbnavi: prefixed stop_ids are custom stops explicitly created as
    carpooling stops; otherwise the (case-insensitive) name is checked for
    carpooling hints ('mitfahr...', 'p&m').
    """
    stop_name = name.lower()
    # str.startswith accepts a tuple of prefixes — one call instead of an `or` chain.
    return stop_id.startswith(('mfdz:', 'bbnavi:')) or 'mitfahr' in stop_name or 'p&m' in stop_name

View file

@ -1,5 +0,0 @@
stop_id;stop_code;stop_lat;stop_lon;stop_name
mfdz:x;x;52.11901;14.2;Stop x
mfdz:y;y;53.1;14.01;Stop y
mfdz:z;z;54.11;14.0;Stop z
mfdz:Ang001;Ang001;53.11901;14.015776;Mitfahrbank Biesenbrow
1 stop_id stop_code stop_lat stop_lon stop_name
2 mfdz:x x 52.11901 14.2 Stop x
3 mfdz:y y 53.1 14.01 Stop y
4 mfdz:z z 54.11 14.0 Stop z
5 mfdz:Ang001 Ang001 53.11901 14.015776 Mitfahrbank Biesenbrow

View file

@ -1,39 +0,0 @@
{
"data": {
"pointsOfInterest": [
{
"id": "14622",
"externalId": "bbnavi:12073:0001",
"name": "Parkbank",
"description": "Parkbank",
"dataProvider": {
"id": "1",
"name": "Administrator"
},
"addresses": [
{
"street": "Hauptstrasse",
"city": "Wittenberge",
"zip": "12345",
"geoLocation": {
"latitude": 52.9932971109789,
"longitude": 11.767383582547
}
}
],
"openStreetMap": {
"capacity": 112,
"capacityCharging": "2",
"capacityDisabled": "",
"fee": "No",
"lit": "Yes",
"parking": "",
"shelter": "No",
"surface": "",
"utilization": "",
"website": ""
}
}
]
}
}

View file

@ -1,24 +0,0 @@
from amarillo.app.services import stops
from amarillo.app.models.Carpool import StopTime


def test_load_stops_from_file():
    """Stops can be loaded from a local semicolon-separated CSV file."""
    store = stops.StopsStore([{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}])
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0


def test_load_csv_stops_from_web_():
    """Stops can be loaded from a CSV served over HTTP.

    NOTE: requires network access to data.mfdz.de.
    """
    store = stops.StopsStore([{"url": "https://data.mfdz.de/mfdz/stops/custom.csv", "vicinity": 50}])
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0


def test_load_geojson_stops_from_web_():
    """Stops can be loaded from a GeoJSON feed served over HTTP.

    NOTE: requires network access to datahub.bbnavi.de.
    """
    store = stops.StopsStore([{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}])
    store.load_stop_sources()
    assert len(store.stopsDataFrames[0]['stops']) > 0


def test_find_closest_stop():
    """find_closest_stop matches a coordinate to the nearest known stop
    within the given search radius (here: the Biesenbrow Mitfahrbank)."""
    store = stops.StopsStore([{"url": "amarillo/app/tests/stops.csv", "vicinity": 50}])
    store.load_stop_sources()
    carpool_stop = StopTime(name="start", lat=53.1191, lon=14.01577)
    stop = store.find_closest_stop(carpool_stop, 1000)
    assert stop.name=='Mitfahrbank Biesenbrow'

View file

@ -1,23 +0,0 @@
from amarillo.app.tests.sampledata import cp1, carpool_repeating
from amarillo.app.services.trips import TripStore
from amarillo.app.services.stops import StopsStore
import logging

logger = logging.getLogger(__name__)


def test_trip_store_put_one_time_carpool():
    """Storing a one-off carpool yields a trip whose first/last stop ids
    match the sample data's origin and destination."""
    trip_store = TripStore(StopsStore())
    t = trip_store.put_carpool(cp1)
    assert t != None
    assert len(t.stop_times) >= 2
    assert t.stop_times[0].stop_id == 'mfdz:12073:001'
    assert t.stop_times[-1].stop_id == 'de:12073:900340137::3'


def test_trip_store_put_repeating_carpool():
    """Storing a repeating carpool also yields a trip with at least
    an origin and a destination stop."""
    trip_store = TripStore(StopsStore())
    t = trip_store.put_carpool(carpool_repeating)
    assert t != None
    assert len(t.stop_times) >= 2

View file

@ -1,14 +1,14 @@
# separate file so that it can be imported without initializing FastAPI # separate file so that it can be imported without initializing FastAPI
from amarillo.app.utils.container import container from amarillo.utils.container import container
import logging import logging
from amarillo.app.services.agencyconf import AgencyConfService, agency_conf_directory from amarillo.services.agencyconf import AgencyConfService, agency_conf_directory
from amarillo.app.services.agencies import AgencyService from amarillo.services.agencies import AgencyService
from amarillo.app.services.regions import RegionService from amarillo.services.regions import RegionService
from amarillo.app.services.config import config from amarillo.services.config import config
from amarillo.app.utils.utils import assert_folder_exists from amarillo.utils.utils import assert_folder_exists
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -6,20 +6,17 @@ import uvicorn
import mimetypes import mimetypes
from starlette.staticfiles import StaticFiles from starlette.staticfiles import StaticFiles
from amarillo.utils.utils import copy_static_files
from amarillo.app.utils.utils import copy_static_files
#this has to run before app.configuration is imported, otherwise we get validation error for config because the config file is not copied yet #this has to run before app.configuration is imported, otherwise we get validation error for config because the config file is not copied yet
copy_static_files(["conf", "static", "templates", "logging.conf", "config"]) copy_static_files(["conf", "static", "templates", "logging.conf", "config"])
import amarillo.plugins import amarillo.plugins
from amarillo.app.configuration import configure_services, configure_admin_token from amarillo.configuration import configure_services, configure_admin_token
from amarillo.app.routers import carpool, agency, agencyconf, region from amarillo.routers import carpool, agency, agencyconf, region
from fastapi import FastAPI from fastapi import FastAPI
from amarillo.app.views import home
# https://pydantic-docs.helpmanual.io/usage/settings/ # https://pydantic-docs.helpmanual.io/usage/settings/
# from amarillo.app.views import home from amarillo.views import home
logging.config.fileConfig('logging.conf', disable_existing_loggers=False) logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logger = logging.getLogger("main") logger = logging.getLogger("main")
@ -57,6 +54,10 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
}, },
}], }],
servers=[ servers=[
{
"description": "MobiData BW Amarillo service",
"url": "https://amarillo.mobidata-bw.de"
},
{ {
"description": "DABB bbnavi Amarillo service", "description": "DABB bbnavi Amarillo service",
"url": "https://amarillo.bbnavi.de" "url": "https://amarillo.bbnavi.de"
@ -69,10 +70,6 @@ app = FastAPI(title="Amarillo - The Carpooling Intermediary",
"description": "Dev server for development", "description": "Dev server for development",
"url": "https://amarillo-dev.mfdz.de" "url": "https://amarillo-dev.mfdz.de"
}, },
{
"description": "Server for Mitanand project",
"url": "https://mitanand.mfdz.de"
},
{ {
"description": "Localhost for development", "description": "Localhost for development",
"url": "http://localhost:8000" "url": "http://localhost:8000"
@ -97,14 +94,14 @@ def load_plugins():
for finder, name, ispkg for finder, name, ispkg
in iter_namespace(amarillo.plugins) in iter_namespace(amarillo.plugins)
} }
print(f"Discovered plugins: {list(discovered_plugins.keys())}") logger.info(f"Discovered plugins: {list(discovered_plugins.keys())}")
for name, module in discovered_plugins.items(): for name, module in discovered_plugins.items():
if hasattr(module, "setup"): if hasattr(module, "setup"):
print(f"Running setup function for {name}") logger.info(f"Running setup function for {name}")
module.setup(app) module.setup(app)
else: print(f"Did not find setup function for {name}") else: logger.info(f"Did not find setup function for {name}")
def configure(): def configure():
configure_admin_token() configure_admin_token()
@ -125,4 +122,3 @@ if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000) uvicorn.run(app, host="0.0.0.0", port=8000)
else: else:
configure() configure()
pass

View file

@ -1,121 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Test notebook for discovering and importing plugins"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'amarillo.plugins.metrics': <module 'amarillo.plugins.metrics' from '/home/user/amarillo/amarillo-plugins/amarillo-metrics/amarillo/plugins/metrics/__init__.py'>}"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import importlib\n",
"import pkgutil\n",
"\n",
"import amarillo.plugins # FIXME this namespace does not exist if there are 0 plugins installed\n",
"\n",
"def iter_namespace(ns_pkg):\n",
" # Source: https://packaging.python.org/guides/creating-and-discovering-plugins/\n",
" return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + \".\")\n",
"\n",
"discovered_plugins = {\n",
" name: importlib.import_module(name)\n",
" for finder, name, ispkg\n",
" in iter_namespace(amarillo.plugins)\n",
"}\n",
"\n",
"discovered_plugins"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['__name__',\n",
" '__doc__',\n",
" '__package__',\n",
" '__loader__',\n",
" '__spec__',\n",
" '__path__',\n",
" '__file__',\n",
" '__cached__',\n",
" '__builtins__',\n",
" 'metrics',\n",
" 'json',\n",
" 'logging',\n",
" 'os',\n",
" 'random',\n",
" 'Callable',\n",
" 'APIRouter',\n",
" 'HTTPException',\n",
" 'Depends',\n",
" 'Request',\n",
" 'datetime',\n",
" 'generate_latest',\n",
" 'Gauge',\n",
" 'Counter',\n",
" 'Info',\n",
" 'FastAPI',\n",
" 'HTTPBasic',\n",
" 'HTTPBasicCredentials',\n",
" 'PlainTextResponse',\n",
" 'secrets',\n",
" 'logger',\n",
" 'security',\n",
" 'amarillo_trips_number_total',\n",
" 'router']"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"metrics = discovered_plugins['amarillo.plugins.metrics']\n",
"\n",
"metrics.__dir__()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View file

@ -4,13 +4,13 @@ from typing import List
from fastapi import APIRouter, HTTPException, status, Depends from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.app.models.Carpool import Carpool, Agency from amarillo.models.Carpool import Carpool, Agency
from amarillo.app.routers.agencyconf import verify_api_key, verify_admin_api_key, verify_permission_for_same_agency_or_admin from amarillo.routers.agencyconf import verify_api_key, verify_admin_api_key, verify_permission_for_same_agency_or_admin
# TODO should move this to service # TODO should move this to service
from amarillo.app.routers.carpool import store_carpool, delete_agency_carpools_older_than from amarillo.routers.carpool import store_carpool, delete_agency_carpools_older_than
from amarillo.app.services.agencies import AgencyService from amarillo.services.agencies import AgencyService
from amarillo.app.services.importing.ride2go import import_ride2go from amarillo.services.importing.ride2go import import_ride2go
from amarillo.app.utils.container import container from amarillo.utils.container import container
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -52,6 +52,7 @@ async def get_agency(agency_id: str, admin_api_key: str = Depends(verify_api_key
operation_id="sync", operation_id="sync",
summary="Synchronizes all carpool offers", summary="Synchronizes all carpool offers",
response_model=List[Carpool], response_model=List[Carpool],
response_model_exclude_none=True,
responses={ responses={
status.HTTP_200_OK: { status.HTTP_200_OK: {
"description": "Carpool created"}, "description": "Carpool created"},

View file

@ -3,10 +3,10 @@ from typing import List
from fastapi import APIRouter, HTTPException, status, Header, Depends from fastapi import APIRouter, HTTPException, status, Header, Depends
from amarillo.app.models.AgencyConf import AgencyConf from amarillo.models.AgencyConf import AgencyConf
from amarillo.app.services.agencyconf import AgencyConfService from amarillo.services.agencyconf import AgencyConfService
from amarillo.app.services.config import config from amarillo.services.config import config
from amarillo.app.utils.container import container from amarillo.utils.container import container
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -8,9 +8,9 @@ from glob import glob
from fastapi import APIRouter, Body, Header, HTTPException, status, Depends from fastapi import APIRouter, Body, Header, HTTPException, status, Depends
from datetime import datetime from datetime import datetime
from amarillo.app.models.Carpool import Carpool from amarillo.models.Carpool import Carpool
from amarillo.app.routers.agencyconf import verify_api_key, verify_permission_for_same_agency_or_admin from amarillo.routers.agencyconf import verify_api_key, verify_permission_for_same_agency_or_admin
from amarillo.app.tests.sampledata import examples from amarillo.tests.sampledata import examples
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -25,12 +25,13 @@ router = APIRouter(
summary="Add a new or update existing carpool", summary="Add a new or update existing carpool",
description="Carpool object to be created or updated", description="Carpool object to be created or updated",
response_model=Carpool, response_model=Carpool,
response_model_exclude_none=True,
responses={ responses={
status.HTTP_404_NOT_FOUND: { status.HTTP_404_NOT_FOUND: {
"description": "Agency does not exist"}, "description": "Agency does not exist"},
}) })
async def post_carpool(carpool: Carpool = Body(..., openapi_examples=examples), async def post_carpool(carpool: Carpool = Body(..., examples=examples),
requesting_agency_id: str = Depends(verify_api_key)) -> Carpool: requesting_agency_id: str = Depends(verify_api_key)) -> Carpool:
await verify_permission_for_same_agency_or_admin(carpool.agency, requesting_agency_id) await verify_permission_for_same_agency_or_admin(carpool.agency, requesting_agency_id)
@ -48,6 +49,7 @@ async def post_carpool(carpool: Carpool = Body(..., openapi_examples=examples),
operation_id="getcarpoolById", operation_id="getcarpoolById",
summary="Find carpool by ID", summary="Find carpool by ID",
response_model=Carpool, response_model=Carpool,
response_model_exclude_none=True,
description="Find carpool by ID", description="Find carpool by ID",
responses={ responses={
status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"}, status.HTTP_404_NOT_FOUND: {"description": "Carpool not found"},

View file

@ -0,0 +1,57 @@
import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Region
from amarillo.routers.agencyconf import verify_admin_api_key
from amarillo.services.regions import RegionService
from amarillo.utils.container import container
from fastapi.responses import FileResponse
logger = logging.getLogger(__name__)
# All region endpoints below are mounted under the /region prefix
# and grouped under the "region" tag in the OpenAPI docs.
router = APIRouter(
    prefix="/region",
    tags=["region"]
)
@router.get("/",
    operation_id="getRegions",
    summary="Return all regions",
    response_model=List[Region],
    responses={
    },
    )
async def get_regions() -> List[Region]:
    """Return every region known to the RegionService."""
    region_service: RegionService = container['regions']
    return [region for region in region_service.regions.values()]
@router.get("/{region_id}",
    operation_id="getRegionById",
    summary="Find region by ID",
    response_model=Region,
    description="Find region by ID",
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
    },
    )
async def get_region(region_id: str) -> Region:
    """Return a single region; a 404 is raised by the helper when it is unknown."""
    found = _assert_region_exists(region_id)
    logger.info(f"Get region {region_id}.")
    return found
def _assert_region_exists(region_id: str) -> Region:
    """Fetch the region with the given id or raise a 404 HTTPException."""
    region_service: RegionService = container['regions']
    found = region_service.get_region(region_id)
    # Guard clause: unknown id -> log and translate into an HTTP 404.
    if found is None:
        message = f"Region with id {region_id} does not exist."
        logger.error(message)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message)
    return found

View file

@ -2,7 +2,7 @@ import json
from glob import glob from glob import glob
from typing import Dict from typing import Dict
from amarillo.app.models.Carpool import Agency from amarillo.models.Carpool import Agency
# TODO FG HB this service should also listen to pyinotify # TODO FG HB this service should also listen to pyinotify
# because the (updated) agencies are needed in the enhancer # because the (updated) agencies are needed in the enhancer

View file

@ -6,8 +6,8 @@ import logging
from fastapi import HTTPException, status from fastapi import HTTPException, status
from amarillo.app.models.AgencyConf import AgencyConf from amarillo.models.AgencyConf import AgencyConf
from amarillo.app.services.config import config from amarillo.services.config import config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -0,0 +1,61 @@
import json
import logging
from datetime import datetime
from typing import Dict
from amarillo.models.Carpool import Carpool
from amarillo.services.trips import TripStore
from amarillo.utils.utils import yesterday, is_older_than_days
logger = logging.getLogger(__name__)
class CarpoolService():
    """In-memory store of carpool offers, keyed by "<agency_id>:<carpool_id>".

    Keeps the given trip store in sync: storing a current carpool forwards it
    via put_carpool, deleting a stored carpool removes it via delete_carpool.
    """

    # Offers whose last update is older than this many days are outdated.
    MAX_OFFER_AGE_IN_DAYS = 180

    def __init__(self, trip_store):
        # trip_store must provide put_carpool(carpool) and
        # delete_carpool(agency_id, carpool_id).
        self.trip_store = trip_store
        self.carpools: Dict[str, Carpool] = {}

    def is_outdated(self, carpool):
        """
        A carpool offer is outdated, if
        * it's completely in the past (if it's a single date offer).
          As we know the start time but not latest arrival, we deem
          offers starting the day before yesterday as outdated
        * its last update occurred before MAX_OFFER_AGE_IN_DAYS
        """
        # A recurring offer stores a set in departureDate; a plain value
        # means the offer runs exactly once on that date.
        runs_once = not isinstance(carpool.departureDate, set)
        return (is_older_than_days(carpool.lastUpdated.date(), self.MAX_OFFER_AGE_IN_DAYS) or
                (runs_once and carpool.departureDate < yesterday()))

    def purge_outdated_offers(self):
        """
        Iterates over all carpools and deletes those which are outdated
        """
        # Snapshot the keys: delete() mutates self.carpools during iteration.
        for key in list(self.carpools.keys()):
            carpool = self.carpools.get(key)
            if carpool and self.is_outdated(carpool):
                logger.info("Purge outdated offer %s", key)
                self.delete(carpool.agency, carpool.id)

    def get(self, agency_id: str, carpool_id: str):
        """Return the stored carpool, or None if it is unknown."""
        return self.carpools.get(f"{agency_id}:{carpool_id}")

    def get_all_ids(self):
        """Return all "<agency_id>:<carpool_id>" keys currently stored."""
        return list(self.carpools)

    def put(self, agency_id: str, carpool_id: str, carpool):
        """Store (or replace) a carpool and sync it to the trip store.

        An outdated carpool is deleted right away instead of being kept.
        """
        self.carpools[f"{agency_id}:{carpool_id}"] = carpool
        # Outdated trips (which might have been in the store)
        # will be deleted
        if self.is_outdated(carpool):
            logger.info('Deleting outdated carpool %s:%s', agency_id, carpool_id)
            self.delete(agency_id, carpool_id)
        else:
            self.trip_store.put_carpool(carpool)

    def delete(self, agency_id: str, carpool_id: str):
        """Remove a carpool (and its trip) if present; no-op otherwise."""
        # Renamed from `id` to avoid shadowing the builtin.
        key = f"{agency_id}:{carpool_id}"
        if key in self.carpools:
            del self.carpools[key]
            self.trip_store.delete_carpool(agency_id, carpool_id)

View file

@ -6,6 +6,7 @@ class Config(BaseSettings):
admin_token: str admin_token: str
ride2go_query_data: str ride2go_query_data: str
env: str = 'DEV' env: str = 'DEV'
graphhopper_base_url: str = 'https://api.mfdz.de/gh'
stop_sources_file: str = 'conf/stop_sources.json'
config = Config(_env_file='config', _env_file_encoding='utf-8') config = Config(_env_file='config', _env_file_encoding='utf-8')

View file

@ -2,10 +2,10 @@ import logging
from typing import List from typing import List
import requests import requests
from amarillo.app.models.Carpool import Carpool, StopTime from amarillo.models.Carpool import Carpool, StopTime
from amarillo.app.services.config import config from amarillo.services.config import config
from amarillo.app.services.secrets import secrets from amarillo.services.secrets import secrets
import re import re
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -0,0 +1,75 @@
import time
# Sample GTFS-RT TripUpdate payloads, written as plain dicts that mirror the
# protobuf JSON representation, for use in realtime tests.

# TripUpdate for an ADDED trip (a new carpool offer by agency "mifaz") with
# two stop time updates: pickup only at the first stop (dropoffType NONE),
# dropoff only at the second (pickupType NONE).
mock_added = {
    'trip': {
        'tripId': 'mifaz:carpool-update-123',
        'startTime': '07:33:00',
        'startDate': '20220509',
        'scheduleRelationship': 'ADDED',
        'routeId': 'mifaz:carpool-update-123',
        # Extension data addressed by its qualified protobuf extension name.
        '[transit_realtime.trip_descriptor]': {
            'routeUrl' : 'http://myurl',
            'agencyId' : 'mifaz',
            'route_long_name' : 'Angermünde nach Biesenbrow'}
    },
    'stopTimeUpdate': [{
        'stopSequence': 1,
        'arrival': {
            # Epoch seconds for 2022-05-09 07:33 (local time, via mktime).
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            # Uncertainty in seconds.
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,7,33,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'de:12073:900340108',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'NONE',
                'pickupType': 'COORDINATE_WITH_DRIVER'
            }
        }
    },
    {
        'stopSequence': 2,
        'arrival': {
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'departure': {
            'time': time.mktime((2022,5,9,8,3,0,0,0,0)),
            'uncertainty': 600
        },
        'stopId': 'mfdz:Ang001',
        'scheduleRelationship': 'SCHEDULED',
        'stop_time_properties': {
            '[transit_realtime.stop_time_properties]': {
                'dropoffType': 'COORDINATE_WITH_DRIVER',
                'pickupType': 'NONE'
            }
        }
    }]
}

# FeedEntity wrapping the ADDED TripUpdate above.
mock_trip_updated_added = {
    'id': 'mifaz:carpool-update-123',
    'tripUpdate': mock_added
}

# FeedEntity for a CANCELED trip (a deleted carpool offer).
mock_trip_updated_deleted = {
    'id': 'carpool-update-124',
    'tripUpdate': {
        'trip': {
            'tripId': '141',
            'startTime': '17:01:08',
            'startDate': '20220509',
            'scheduleRelationship': 'CANCELED',
            'routeId': '141'
        }
    }
}

View file

@ -2,7 +2,7 @@ import json
from glob import glob from glob import glob
from typing import Dict from typing import Dict
from amarillo.app.models.Carpool import Region from amarillo.models.Carpool import Region
class RegionService: class RegionService:

View file

@ -1,12 +1,13 @@
from typing import Dict from typing import Dict
from pydantic import Field from pydantic import Field
from pydantic_settings import BaseSettings from pydantic_settings import BaseSettings
from typing import Optional
# Example: secrets = { "mfdz": "some secret" } # Example: secrets = { "mfdz": "some secret" }
class Secrets(BaseSettings): class Secrets(BaseSettings):
ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN') ride2go_token: str = Field(None, env = 'RIDE2GO_TOKEN')
metrics_user: str = Field(None, env = 'METRICS_USER') # TODO: define these as required if metrics plugin is installed
metrics_password: str = Field(None, env = 'METRICS_PASSWORD') metrics_user: Optional[str] = Field(None, env = 'METRICS_USER')
metrics_password: Optional[str] = Field(None, env = 'METRICS_PASSWORD')
# Read if file exists, otherwise no error (it's in .gitignore) # Read if file exists, otherwise no error (it's in .gitignore)

View file

@ -0,0 +1,8 @@
{
"id": "mfdz",
"name": "MITFAHR|DE|ZENTRALE",
"url": "http://mfdz.de",
"timezone": "Europe/Berlin",
"lang": "de",
"email": "info@mfdz.de"
}

View file

@ -0,0 +1,8 @@
{
"id": "mifaz",
"name": "mifaz",
"url": "https://www.mifaz.de",
"timezone": "Europe/Berlin",
"lang": "de",
"email": "info@mifaz.de"
}

View file

@ -0,0 +1,8 @@
{
"id": "ride2go",
"name": "ride2go",
"url": "https://www.ride2go.de",
"timezone": "Europe/Berlin",
"lang": "de",
"email": "info@ride2go.com"
}

View file

@ -0,0 +1 @@
{"id": "bb", "bbox": [11.26, 51.36, 14.77, 53.56]}

View file

@ -0,0 +1 @@
{"id": "bw", "bbox": [ 7.51, 47.54, 10.50, 49.79]}

View file

@ -0,0 +1 @@
{"id": "by", "bbox": [ 8.97, 47.28, 13.86, 50.56]}

View file

@ -0,0 +1 @@
{"id": "nrw", "bbox": [ 5.86, 50.33, 9.45, 52.53]}

View file

@ -0,0 +1,5 @@
[
{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/stops_zhv.csv", "vicinity": 50},
{"url": "https://data.mfdz.de/mfdz/stops/parkings_osm.csv", "vicinity": 500}
]

View file

@ -1,3 +1,5 @@
# Bounding-Box Germany # Bounding-Box Germany
ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }' ride2go_query_data = '{ "southWestCoordinates": { "lat": 47.3, "lon": 5.98 }, "northEastCoordinates": { "lat": 54.99, "lon": 15.02 }, "lastModifiedSinceDays": 180 }'
env = 'PROD' env = 'PROD'
graphhopper_base_url = 'https://api.mfdz.de/gh'
stop_sources_file = 'conf/stop_sources.json'

View file

@ -0,0 +1,38 @@
from fastapi.testclient import TestClient
from amarillo.main import app
from amarillo.tests.sampledata import carpool_1234, data1
client = TestClient(app)
# TODO FG: This test needs a clean temporary storage folder, not the hard coded data dir.
def test_doc():
    """The generated OpenAPI schema must be served successfully."""
    resp = client.get("/openapi.json")
    assert resp.status_code == 200
def test_get_mfdz_0():
    """Fetching an unknown carpool yields a 404 with a descriptive detail."""
    resp = client.get("/carpool/mfdz/0")
    assert resp.status_code == 404
    assert resp.json() == {"detail": "Carpool with agency mfdz and id 0 does not exist."}
def test_delete_mfdz_0():
    """Deleting an unknown carpool yields a 404 with a descriptive detail."""
    resp = client.delete("/carpool/mfdz/0")
    assert resp.status_code == 404
    assert resp.json() == {"detail": "Carpool with id 0 does not exist."}
def test_post():
    """Full lifecycle: absent -> created -> readable -> deleted -> absent."""
    url = f"/carpool/mfdz/{data1['id']}"

    assert client.get(url).status_code == 404, "The carpool should not exist yet"

    assert client.post("/carpool/", json=data1).status_code == 200, "The first post must work with 200"

    assert client.get(url).status_code == 200, "After post, the get must work"

    assert client.delete(url).status_code == 200, "The first delete must work with 200"

    assert client.delete(url).status_code == 404, "The second delete must fail"

View file

@ -1,4 +1,4 @@
from amarillo.app.models.Carpool import Carpool, StopTime, Weekday from amarillo.models.Carpool import Carpool, StopTime, Weekday
# TODO use meanigful values for id and lat, lon # TODO use meanigful values for id and lat, lon
stops_1234 = [ stops_1234 = [

View file

@ -1,6 +1,5 @@
import os import os
import re import re
import os
import shutil import shutil
from pathlib import Path from pathlib import Path
import logging import logging
@ -50,7 +49,7 @@ def geodesic_distance_in_m(coord1, coord2):
def copy_static_files(files_and_dirs_to_copy): def copy_static_files(files_and_dirs_to_copy):
amarillo_dir = Path(__file__).parents[2] amarillo_dir = Path(__file__).parents[1]
source_dir = os.path.join(amarillo_dir, "static") source_dir = os.path.join(amarillo_dir, "static")
destination_dir = os.getcwd() destination_dir = os.getcwd()
@ -59,6 +58,9 @@ def copy_static_files(files_and_dirs_to_copy):
source_path = os.path.join(source_dir, item) source_path = os.path.join(source_dir, item)
destination_path = os.path.join(destination_dir, item) destination_path = os.path.join(destination_dir, item)
if not os.path.exists(source_path):
raise FileNotFoundError(source_path)
if os.path.exists(destination_path): if os.path.exists(destination_path):
# logger.info(f"{item} already exists") # logger.info(f"{item} already exists")
continue continue

View file

@ -1,17 +1,21 @@
[project] [project]
name = "amarillo-core" name = "amarillo"
version = "0.0.14" version = "0.0.15a4"
description = "Aggregates and enhances carpooling-offers and publishes them as GTFS(-RT)"
readme = "README.md"
license = {file = "LICENSE"}
keywords = ["amarillo", "ridesharing", "carpooling", "gtfs", "gtfs-rt"]
dependencies = [ dependencies = [
"fastapi[all]==0.104.0", "fastapi[all]==0.109.0",
"geopandas==0.14", "geopandas==0.14",
"uvicorn[standard]==0.23.2", "uvicorn[standard]==0.23.2",
"pydantic[dotenv]==2.4.2", "pydantic[dotenv]==2.4.2",
"protobuf==3.20.3", "protobuf==3.20.3",
"starlette", "starlette~=0.35",
"requests==2.31.0", "requests==2.31.0",
"pyproj==3.6.1", "pyproj==3.6.1",
"geojson-pydantic==1.0.1", "geojson-pydantic==1.0.1",
"pytest", "watchdog==3.0.0",
] ]
[tool.setuptools.packages] [tool.setuptools.packages]

View file

@ -1,9 +1,9 @@
fastapi[all]==0.104.0 fastapi[all]==0.109.0
geopandas==0.14 geopandas==0.14
uvicorn[standard]==0.23.2 uvicorn[standard]==0.23.2
pydantic[dotenv]==2.4.2 pydantic[dotenv]==2.4.2
protobuf==3.20.3 protobuf==3.20.3
starlette starlette~=0.35
requests==2.31.0 requests==2.31.0
pyproj==3.6.1 pyproj==3.6.1
geojson-pydantic==1.0.1 geojson-pydantic==1.0.1