Run as FastAPI application
This commit is contained in:
parent
17a6888583
commit
96697b6b7a
1
amarillo-enhancer/__init__.py
Normal file
1
amarillo-enhancer/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
# from .enhancer import *
|
||||
|
|
@ -8,9 +8,9 @@ from amarillo.models.Carpool import Carpool
|
|||
from amarillo.plugins.enhancer.services import stops
|
||||
from amarillo.plugins.enhancer.services import trips
|
||||
from amarillo.plugins.enhancer.services.carpools import CarpoolService
|
||||
from amarillo.plugins.enhancer.services import gtfs_generator
|
||||
from amarillo.services.config import config
|
||||
from amarillo.configuration import configure_services
|
||||
from .services.trips import TripTransformer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -19,22 +19,29 @@ enhancer_configured = False
|
|||
def configure_enhancer_services():
|
||||
#Make sure configuration only happens once
|
||||
global enhancer_configured
|
||||
global transformer
|
||||
if enhancer_configured:
|
||||
logger.info("Enhancer is already configured")
|
||||
return
|
||||
|
||||
configure_services()
|
||||
|
||||
|
||||
|
||||
|
||||
logger.info("Load stops...")
|
||||
with open(config.stop_sources_file) as stop_sources_file:
|
||||
with open('data/stop_sources.json') as stop_sources_file:
|
||||
stop_sources = json.load(stop_sources_file)
|
||||
stop_store = stops.StopsStore(stop_sources)
|
||||
|
||||
stop_store.load_stop_sources()
|
||||
# TODO: do we need container?
|
||||
container['stops_store'] = stop_store
|
||||
container['trips_store'] = trips.TripStore(stop_store)
|
||||
container['carpools'] = CarpoolService(container['trips_store'])
|
||||
|
||||
transformer = TripTransformer(stop_store)
|
||||
|
||||
logger.info("Restore carpools...")
|
||||
|
||||
for agency_id in container['agencies'].agencies:
|
||||
94
amarillo-enhancer/enhancer.py
Normal file
94
amarillo-enhancer/enhancer.py
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
from .models.Carpool import Carpool
|
||||
from .services.trips import TripTransformer
|
||||
import logging
|
||||
import logging.config
|
||||
from fastapi import FastAPI, status, Body
|
||||
from .configuration import configure_enhancer_services
|
||||
from amarillo.utils.container import container
|
||||
|
||||
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
|
||||
logger = logging.getLogger("enhancer")
|
||||
|
||||
#TODO: clean up metadata
|
||||
app = FastAPI(title="Amarillo Enhancer",
|
||||
description="This service allows carpool agencies to publish "
|
||||
"their trip offers, so routing services may suggest "
|
||||
"them as trip options. For carpool offers, only the "
|
||||
"minimum required information (origin/destination, "
|
||||
"optionally intermediate stops, departure time and a "
|
||||
"deep link for booking/contacting the driver) needs to "
|
||||
"be published, booking/contact exchange is to be "
|
||||
"handled by the publishing agency.",
|
||||
version="0.0.1",
|
||||
# TODO 404
|
||||
terms_of_service="http://mfdz.de/carpool-hub-terms/",
|
||||
contact={
|
||||
# "name": "unused",
|
||||
# "url": "http://unused",
|
||||
"email": "info@mfdz.de",
|
||||
},
|
||||
license_info={
|
||||
"name": "AGPL-3.0 License",
|
||||
"url": "https://www.gnu.org/licenses/agpl-3.0.de.html",
|
||||
},
|
||||
openapi_tags=[
|
||||
{
|
||||
"name": "carpool",
|
||||
# "description": "Find out more about Amarillo - the carpooling intermediary",
|
||||
"externalDocs": {
|
||||
"description": "Find out more about Amarillo - the carpooling intermediary",
|
||||
"url": "https://github.com/mfdz/amarillo",
|
||||
},
|
||||
}],
|
||||
servers=[
|
||||
{
|
||||
"description": "MobiData BW Amarillo service",
|
||||
"url": "https://amarillo.mobidata-bw.de"
|
||||
},
|
||||
{
|
||||
"description": "DABB bbnavi Amarillo service",
|
||||
"url": "https://amarillo.bbnavi.de"
|
||||
},
|
||||
{
|
||||
"description": "Demo server by MFDZ",
|
||||
"url": "https://amarillo.mfdz.de"
|
||||
},
|
||||
{
|
||||
"description": "Dev server for development",
|
||||
"url": "https://amarillo-dev.mfdz.de"
|
||||
},
|
||||
{
|
||||
"description": "Server for Mitanand project",
|
||||
"url": "https://mitanand.mfdz.de"
|
||||
},
|
||||
{
|
||||
"description": "Localhost for development",
|
||||
"url": "http://localhost:8000"
|
||||
}
|
||||
],
|
||||
redoc_url=None
|
||||
)
|
||||
configure_enhancer_services()
|
||||
stops_store = container['stops_store']
|
||||
transformer : TripTransformer = TripTransformer(stops_store)
|
||||
logger.info(transformer)
|
||||
|
||||
@app.post("/",
|
||||
operation_id="enhancecarpool",
|
||||
summary="Add a new or update existing carpool",
|
||||
description="Carpool object to be enhanced",
|
||||
response_model=Carpool, # TODO
|
||||
response_model_exclude_none=True,
|
||||
responses={
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": "Agency does not exist"},
|
||||
|
||||
})
|
||||
#TODO: add examples
|
||||
async def post_carpool(carpool: Carpool = Body(...)) -> Carpool:
|
||||
|
||||
logger.info(f"POST trip {carpool.agency}:{carpool.id}.")
|
||||
|
||||
enhanced = transformer.enhance_carpool(carpool)
|
||||
|
||||
return enhanced
|
||||
407
amarillo-enhancer/models/Carpool.py
Normal file
407
amarillo-enhancer/models/Carpool.py
Normal file
|
|
@ -0,0 +1,407 @@
|
|||
from datetime import time, date, datetime
|
||||
from pydantic import ConfigDict, BaseModel, Field, HttpUrl, EmailStr
|
||||
from typing import List, Union, Set, Optional, Tuple
|
||||
from datetime import time
|
||||
from pydantic import BaseModel, Field
|
||||
from geojson_pydantic.geometries import LineString
|
||||
from enum import Enum, IntEnum
|
||||
|
||||
NumType = Union[float, int]
|
||||
|
||||
MAX_STOPS_PER_TRIP = 100
|
||||
|
||||
class Weekday(str, Enum):
|
||||
monday = "monday"
|
||||
tuesday = "tuesday"
|
||||
wednesday = "wednesday"
|
||||
thursday = "thursday"
|
||||
friday = "friday"
|
||||
saturday = "saturday"
|
||||
sunday = "sunday"
|
||||
|
||||
class PickupDropoffType(str, Enum):
|
||||
pickup_and_dropoff = "pickup_and_dropoff"
|
||||
only_pickup = "only_pickup"
|
||||
only_dropoff = "only_dropoff"
|
||||
|
||||
class YesNoEnum(IntEnum):
|
||||
yes = 1
|
||||
no = 2
|
||||
|
||||
class LuggageSize(IntEnum):
|
||||
small = 1
|
||||
medium = 2
|
||||
large = 3
|
||||
|
||||
class StopTime(BaseModel):
|
||||
id: Optional[str] = Field(
|
||||
None,
|
||||
description="Optional Stop ID. If given, it should conform to the "
|
||||
"IFOPT specification. For official transit stops, "
|
||||
"it should be their official IFOPT. In Germany, this is "
|
||||
"the DHID which is available via the 'zentrales "
|
||||
"Haltestellenverzeichnis (zHV)', published by DELFI e.V. "
|
||||
"Note, that currently carpooling location.",
|
||||
pattern=r"^([a-zA-Z]{2,6}):\d+:\d+(:\d*(:\w+)?)?$|^osm:[nwr]\d+$",
|
||||
examples=["de:12073:900340137::2"])
|
||||
|
||||
name: str = Field(
|
||||
description="Name of the location. Use a name that people will "
|
||||
"understand in the local and tourist vernacular.",
|
||||
min_length=1,
|
||||
max_length=256,
|
||||
examples=["Angermünde, Breitscheidstr."])
|
||||
|
||||
departureTime: Optional[str] = Field(
|
||||
None,
|
||||
description="Departure time from a specific stop for a specific "
|
||||
"carpool trip. For times occurring after midnight on the "
|
||||
"service day, the time is given as a value greater than "
|
||||
"24:00:00 in HH:MM:SS local time for the day on which the "
|
||||
"trip schedule begins. If there are not separate times for "
|
||||
"arrival and departure at a stop, the same value for arrivalTime "
|
||||
"and departureTime. Note, that arrivalTime/departureTime of "
|
||||
"the stops are not mandatory, and might then be estimated by "
|
||||
"this service.",
|
||||
pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
|
||||
examples=["17:00"]
|
||||
)
|
||||
|
||||
arrivalTime: Optional[str] = Field(
|
||||
None,
|
||||
description="Arrival time at a specific stop for a specific trip on a "
|
||||
"carpool route. If there are not separate times for arrival "
|
||||
"and departure at a stop, enter the same value for arrivalTime "
|
||||
"and departureTime. For times occurring after midnight on the "
|
||||
"service day, the time as a value greater than 24:00:00 in "
|
||||
"HH:MM:SS local time for the day on which the trip schedule "
|
||||
"begins. Note, that arrivalTime/departureTime of the stops "
|
||||
"are not mandatory, and might then be estimated by this "
|
||||
"service.",
|
||||
pattern=r"^[0-9][0-9]:[0-5][0-9](:[0-5][0-9])?$",
|
||||
examples=["18:00"])
|
||||
|
||||
lat: float = Field(
|
||||
description="Latitude of the location. Should describe the location "
|
||||
"where a passenger may mount/dismount the vehicle.",
|
||||
ge=-90,
|
||||
lt=90,
|
||||
examples=["53.0137311391"])
|
||||
|
||||
lon: float = Field(
|
||||
description="Longitude of the location. Should describe the location "
|
||||
"where a passenger may mount/dismount the vehicle.",
|
||||
ge=-180,
|
||||
lt=180,
|
||||
examples=["13.9934706687"])
|
||||
|
||||
pickup_dropoff: Optional[PickupDropoffType] = Field(
|
||||
None, description="If passengers may be picked up, dropped off or both at this stop. "
|
||||
"If not specified, this service may assign this according to some custom rules. "
|
||||
"E.g. Amarillo may allow pickup only for the first third of the distance travelled, "
|
||||
"and dropoff only for the last third." ,
|
||||
examples=["only_pickup"]
|
||||
)
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"example": "{'id': 'de:12073:900340137::2', 'name': "
|
||||
"'Angermünde, Breitscheidstr.', 'lat': 53.0137311391, "
|
||||
"'lon': 13.9934706687}"
|
||||
})
|
||||
|
||||
class Region(BaseModel):
|
||||
id: str = Field(
|
||||
description="ID of the region.",
|
||||
min_length=1,
|
||||
max_length=20,
|
||||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["bb"])
|
||||
|
||||
bbox: Tuple[NumType, NumType, NumType, NumType] = Field(
|
||||
description="Bounding box of the region. Format is [minLon, minLat, maxLon, maxLat]",
|
||||
examples=[[10.5,49.2,11.3,51.3]])
|
||||
|
||||
class RidesharingInfo(BaseModel):
|
||||
number_free_seats: int = Field(
|
||||
description="Number of free seats",
|
||||
ge=0,
|
||||
examples=[3])
|
||||
|
||||
same_gender: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Trip only for same gender:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[1])
|
||||
luggage_size: Optional[LuggageSize] = Field(
|
||||
None,
|
||||
description="Size of the luggage:"
|
||||
"1: small"
|
||||
"2: medium"
|
||||
"3: large",
|
||||
examples=[3])
|
||||
animal_car: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Animals in Car allowed:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[2])
|
||||
|
||||
car_model: Optional[str] = Field(
|
||||
None,
|
||||
description="Car model",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
examples=["Golf"])
|
||||
car_brand: Optional[str] = Field(
|
||||
None,
|
||||
description="Car brand",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
examples=["VW"])
|
||||
|
||||
creation_date: datetime = Field(
|
||||
description="Date when trip was created",
|
||||
examples=["2022-02-13T20:20:39+00:00"])
|
||||
|
||||
smoking: Optional[YesNoEnum] = Field(
|
||||
None,
|
||||
description="Smoking allowed:"
|
||||
"1: Yes"
|
||||
"2: No",
|
||||
examples=[2])
|
||||
|
||||
payment_method: Optional[str] = Field(
|
||||
None,
|
||||
description="Method of payment",
|
||||
min_length=1,
|
||||
max_length=48)
|
||||
|
||||
class Driver(BaseModel):
|
||||
driver_id: Optional[str] = Field(
|
||||
None,
|
||||
description="Identifies the driver.",
|
||||
min_length=1,
|
||||
max_length=256,
|
||||
pattern='^[a-zA-Z0-9_-]+$',
|
||||
examples=["789"])
|
||||
profile_picture: Optional[HttpUrl] = Field(
|
||||
None,
|
||||
description="URL that contains the profile picture",
|
||||
examples=["https://mfdz.de/driver/789/picture"])
|
||||
rating: Optional[int] = Field(
|
||||
None,
|
||||
description="Rating of the driver from 1 to 5."
|
||||
"0 no rating yet",
|
||||
ge=0,
|
||||
le=5,
|
||||
examples=[5])
|
||||
|
||||
class Agency(BaseModel):
|
||||
id: str = Field(
|
||||
description="ID of the agency.",
|
||||
min_length=1,
|
||||
max_length=20,
|
||||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["mfdz"])
|
||||
|
||||
name: str = Field(
|
||||
description="Name",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
pattern=r'^[\w -\.\|]+$',
|
||||
examples=["MITFAHR|DE|ZENTRALE"])
|
||||
|
||||
url: HttpUrl = Field(
|
||||
description="URL of the carpool agency.",
|
||||
examples=["https://mfdz.de/"])
|
||||
|
||||
timezone: str = Field(
|
||||
description="Timezone where the carpool agency is located.",
|
||||
min_length=1,
|
||||
max_length=48,
|
||||
pattern=r'^[\w/]+$',
|
||||
examples=["Europe/Berlin"])
|
||||
|
||||
lang: str = Field(
|
||||
description="Primary language used by this carpool agency.",
|
||||
min_length=1,
|
||||
max_length=2,
|
||||
pattern=r'^[a-zA-Z_]+$',
|
||||
examples=["de"])
|
||||
|
||||
email: EmailStr = Field(
|
||||
description="""Email address actively monitored by the agency’s
|
||||
customer service department. This email address should be a direct
|
||||
contact point where carpool riders can reach a customer service
|
||||
representative at the agency.""",
|
||||
examples=["info@mfdz.de"])
|
||||
|
||||
terms_url: Optional[HttpUrl] = Field(
|
||||
None, description="""A fully qualified URL pointing to the terms of service
|
||||
(also often called "terms of use" or "terms and conditions")
|
||||
for the service.""",
|
||||
examples=["https://mfdz.de/nutzungsbedingungen"])
|
||||
|
||||
privacy_url: Optional[HttpUrl] = Field(
|
||||
None, description="""A fully qualified URL pointing to the privacy policy for the service.""",
|
||||
examples=["https://mfdz.de/datenschutz"])
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"title": "Agency",
|
||||
"description": "Carpool agency.",
|
||||
"example":
|
||||
#"""
|
||||
{
|
||||
"id": "mfdz",
|
||||
"name": "MITFAHR|DE|ZENTRALE",
|
||||
"url": "http://mfdz.de",
|
||||
"timezone": "Europe/Berlin",
|
||||
"lang": "de",
|
||||
"email": "info@mfdz.de",
|
||||
"terms_url": "https://mfdz.de/nutzungsbedingungen",
|
||||
"privacy_url": "https://mfdz.de/datenschutz",
|
||||
}
|
||||
#"""
|
||||
})
|
||||
|
||||
class Carpool(BaseModel):
|
||||
id: str = Field(
|
||||
description="ID of the carpool. Should be supplied and managed by the "
|
||||
"carpooling platform which originally published this "
|
||||
"offer.",
|
||||
min_length=1,
|
||||
max_length=256,
|
||||
pattern='^[a-zA-Z0-9_-]+$',
|
||||
examples=["103361"])
|
||||
|
||||
agency: str = Field(
|
||||
description="Short one string name of the agency, used as a namespace "
|
||||
"for ids.",
|
||||
min_length=1,
|
||||
max_length=20,
|
||||
pattern='^[a-zA-Z0-9]+$',
|
||||
examples=["mfdz"])
|
||||
|
||||
driver: Optional[Driver] = Field(
|
||||
None,
|
||||
description="Driver data",
|
||||
examples=["""
|
||||
{
|
||||
"driver_id": "123",
|
||||
"profile_picture": "https://mfdz.de/driver/789/picture",
|
||||
"rating": 5
|
||||
}
|
||||
"""])
|
||||
|
||||
deeplink: HttpUrl = Field(
|
||||
description="Link to an information page providing detail information "
|
||||
"for this offer, and, especially, an option to book the "
|
||||
"trip/contact the driver.",
|
||||
examples=["https://mfdz.de/trip/103361"])
|
||||
|
||||
stops: List[StopTime] = Field(
|
||||
...,
|
||||
min_length=2,
|
||||
max_length=MAX_STOPS_PER_TRIP,
|
||||
description="Stops which this carpool passes by and offers to pick "
|
||||
"up/drop off passengers. This list must at minimum "
|
||||
"include two stops, the origin and destination of this "
|
||||
"carpool trip. Note that for privacy reasons, the stops "
|
||||
"usually should be official locations, like meeting "
|
||||
"points, carpool parkings, ridesharing benches or "
|
||||
"similar.",
|
||||
examples=["""[
|
||||
{
|
||||
"id": "03",
|
||||
"name": "drei",
|
||||
"lat": 45,
|
||||
"lon": 9
|
||||
},
|
||||
{
|
||||
"id": "03b",
|
||||
"name": "drei b",
|
||||
"lat": 45,
|
||||
"lon": 9
|
||||
}
|
||||
]"""])
|
||||
|
||||
# TODO can be removed, as first stop has departureTime as well
|
||||
departureTime: time = Field(
|
||||
description="Time when the carpool leaves at the first stop. Note, "
|
||||
"that this API currently does not support flexible time "
|
||||
"windows for departure, though drivers might be flexible."
|
||||
"For recurring trips, the weekdays this trip will run. ",
|
||||
examples=["17:00"])
|
||||
|
||||
# TODO think about using googlecal Format
|
||||
departureDate: Union[date, Set[Weekday]] = Field(
|
||||
description="Date when the trip will start, in case it is a one-time "
|
||||
"trip. For recurring trips, specify weekdays. "
|
||||
"Note, that when for different weekdays different "
|
||||
"departureTimes apply, multiple carpool offers should be "
|
||||
"published.",
|
||||
examples=['A single date 2022-04-04 or a list of weekdays ["saturday", '
|
||||
'"sunday"]'])
|
||||
route_color: Optional[str] = Field(
|
||||
None,
|
||||
pattern='^([0-9A-Fa-f]{6})$',
|
||||
description="Route color designation that matches public facing material. "
|
||||
"The color difference between route_color and route_text_color "
|
||||
"should provide sufficient contrast when viewed on a black and "
|
||||
"white screen.",
|
||||
examples=["0039A6"])
|
||||
route_text_color: Optional[str] = Field(
|
||||
None,
|
||||
pattern='^([0-9A-Fa-f]{6})$',
|
||||
description="Legible color to use for text drawn against a background of "
|
||||
"route_color. The color difference between route_color and "
|
||||
"route_text_color should provide sufficient contrast when "
|
||||
"viewed on a black and white screen.",
|
||||
examples=["D4D2D2"])
|
||||
path: Optional[LineString] = Field(
|
||||
None, description="Optional route geometry as json LineString.")
|
||||
|
||||
lastUpdated: Optional[datetime] = Field(
|
||||
None,
|
||||
description="LastUpdated should reflect the last time, the user "
|
||||
"providing this offer, made an update or confirmed, "
|
||||
"the offer is still valid. Note that this service might "
|
||||
"purge outdated offers (e.g. older than 180 days). If not "
|
||||
"passed, the service may assume 'now'",
|
||||
examples=["2022-02-13T20:20:39+00:00"])
|
||||
additional_ridesharing_info: Optional[RidesharingInfo] = Field(
|
||||
None,
|
||||
description="Extension of GRFS to the GTFS standard",
|
||||
examples=["""
|
||||
{
|
||||
"number_free_seats": 2,
|
||||
"creation_date": "2022-02-13T20:20:39+00:00",
|
||||
"same_gender": 2,
|
||||
"smoking": 1,
|
||||
"luggage_size": 3
|
||||
}
|
||||
"""])
|
||||
model_config = ConfigDict(json_schema_extra={
|
||||
"title": "Carpool",
|
||||
# description ...
|
||||
"example":
|
||||
"""
|
||||
{
|
||||
"id": "1234",
|
||||
"agency": "mfdz",
|
||||
"deeplink": "http://mfdz.de",
|
||||
"stops": [
|
||||
{
|
||||
"id": "de:12073:900340137::2", "name": "ABC",
|
||||
"lat": 45, "lon": 9
|
||||
},
|
||||
{
|
||||
"id": "de:12073:900340137::3", "name": "XYZ",
|
||||
"lat": 45, "lon": 9
|
||||
}
|
||||
],
|
||||
"departureTime": "12:34",
|
||||
"departureDate": "2022-03-30",
|
||||
"lastUpdated": "2022-03-30T12:34:00+00:00"
|
||||
}
|
||||
"""
|
||||
})
|
||||
|
|
@ -1 +0,0 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
|
||||
|
|
@ -1 +0,0 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
|
||||
|
|
@ -1 +0,0 @@
|
|||
from .enhancer import *
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
import json
|
||||
from threading import Thread
|
||||
import logging
|
||||
import logging.config
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
|
||||
from amarillo.plugins.enhancer.configuration import configure_enhancer_services
|
||||
from amarillo.utils.container import container
|
||||
from amarillo.models.Carpool import Carpool
|
||||
from amarillo.utils.utils import agency_carpool_ids_from_filename
|
||||
|
||||
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
|
||||
logger = logging.getLogger("enhancer")
|
||||
|
||||
class EventHandler(FileSystemEventHandler):
|
||||
# TODO FG HB should watch for both carpools and agencies
|
||||
# in data/agency, data/agencyconf, see AgencyConfService
|
||||
|
||||
def on_closed(self, event):
|
||||
|
||||
logger.info("CLOSE_WRITE: Created %s", event.src_path)
|
||||
try:
|
||||
with open(event.src_path, 'r', encoding='utf-8') as f:
|
||||
dict = json.load(f)
|
||||
carpool = Carpool(**dict)
|
||||
|
||||
container['carpools'].put(carpool.agency, carpool.id, carpool)
|
||||
except FileNotFoundError as e:
|
||||
logger.error("Carpool could not be added, as already deleted (%s)", event.src_path)
|
||||
except:
|
||||
logger.exception("Eventhandler on_closed encountered exception")
|
||||
|
||||
def on_deleted(self, event):
|
||||
try:
|
||||
logger.info("DELETE: Removing %s", event.src_path)
|
||||
(agency_id, carpool_id) = agency_carpool_ids_from_filename(event.src_path)
|
||||
container['carpools'].delete(agency_id, carpool_id)
|
||||
except:
|
||||
logger.exception("Eventhandler on_deleted encountered exception")
|
||||
|
||||
|
||||
def run_enhancer():
|
||||
logger.info("Hello Enhancer")
|
||||
|
||||
configure_enhancer_services()
|
||||
|
||||
observer = Observer() # Watch Manager
|
||||
|
||||
observer.schedule(EventHandler(), 'data/carpool', recursive=True)
|
||||
observer.start()
|
||||
|
||||
import time
|
||||
|
||||
try:
|
||||
# TODO FG Is this really needed?
|
||||
cnt = 0
|
||||
ENHANCER_LOG_INTERVAL_IN_S = 600
|
||||
while True:
|
||||
if cnt == ENHANCER_LOG_INTERVAL_IN_S:
|
||||
logger.debug("Currently stored carpool ids: %s", container['carpools'].get_all_ids())
|
||||
cnt = 0
|
||||
|
||||
time.sleep(1)
|
||||
cnt += 1
|
||||
finally:
|
||||
observer.stop()
|
||||
observer.join()
|
||||
|
||||
logger.info("Goodbye Enhancer")
|
||||
|
||||
def setup(app):
|
||||
thread = Thread(target=run_enhancer, daemon=True)
|
||||
thread.start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_enhancer()
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
# TODO: move to enhancer
|
||||
from collections import namedtuple
|
||||
from datetime import timedelta
|
||||
|
||||
GtfsFeedInfo = namedtuple('GtfsFeedInfo', 'feed_id feed_publisher_name feed_publisher_url feed_lang feed_version')
|
||||
GtfsAgency = namedtuple('GtfsAgency', 'agency_id agency_name agency_url agency_timezone agency_lang agency_email')
|
||||
GtfsRoute = namedtuple('GtfsRoute', 'agency_id route_id route_long_name route_type route_url route_short_name')
|
||||
GtfsStop = namedtuple('GtfsStop', 'stop_id stop_lat stop_lon stop_name')
|
||||
GtfsStopTime = namedtuple('GtfsStopTime', 'trip_id departure_time arrival_time stop_id stop_sequence pickup_type drop_off_type timepoint')
|
||||
GtfsTrip = namedtuple('GtfsTrip', 'route_id trip_id service_id shape_id trip_headsign bikes_allowed')
|
||||
GtfsCalendar = namedtuple('GtfsCalendar', 'service_id start_date end_date monday tuesday wednesday thursday friday saturday sunday')
|
||||
GtfsCalendarDate = namedtuple('GtfsCalendarDate', 'service_id date exception_type')
|
||||
GtfsShape = namedtuple('GtfsShape','shape_id shape_pt_lon shape_pt_lat shape_pt_sequence')
|
||||
|
||||
# TODO Move to utils
|
||||
class GtfsTimeDelta(timedelta):
|
||||
def __str__(self):
|
||||
seconds = self.total_seconds()
|
||||
hours = seconds // 3600
|
||||
minutes = (seconds % 3600) // 60
|
||||
seconds = seconds % 60
|
||||
str = '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
|
||||
return (str)
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, timedelta):
|
||||
return self.__class__(self.days + other.days,
|
||||
self.seconds + other.seconds,
|
||||
self.microseconds + other.microseconds)
|
||||
return NotImplemented
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
# Constants
|
||||
|
||||
NO_BIKES_ALLOWED = 2
|
||||
RIDESHARING_ROUTE_TYPE = 1551
|
||||
CALENDAR_DATES_EXCEPTION_TYPE_ADDED = 1
|
||||
CALENDAR_DATES_EXCEPTION_TYPE_REMOVED = 2
|
||||
STOP_TIMES_STOP_TYPE_REGULARLY = 0
|
||||
STOP_TIMES_STOP_TYPE_NONE = 1
|
||||
STOP_TIMES_STOP_TYPE_PHONE_AGENCY = 2
|
||||
STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER = 3
|
||||
STOP_TIMES_TIMEPOINT_APPROXIMATE = 0
|
||||
STOP_TIMES_TIMEPOINT_EXACT = 1
|
||||
|
||||
MFDZ_DEFAULT_UNCERTAINITY = 600
|
||||
|
|
@ -1,377 +0,0 @@
|
|||
from amarillo.plugins.enhancer.models.gtfs import GtfsTimeDelta, GtfsStopTime
|
||||
from amarillo.models.Carpool import MAX_STOPS_PER_TRIP, Carpool, Weekday, StopTime, PickupDropoffType, Driver, RidesharingInfo
|
||||
from amarillo.services.config import config
|
||||
from amarillo.plugins.enhancer.services.gtfs_constants import *
|
||||
from amarillo.plugins.enhancer.services.routing import RoutingService, RoutingException
|
||||
from amarillo.plugins.enhancer.services.stops import is_carpooling_stop
|
||||
from amarillo.utils.utils import assert_folder_exists, is_older_than_days, yesterday, geodesic_distance_in_m
|
||||
from shapely.geometry import Point, LineString, box
|
||||
from geojson_pydantic.geometries import LineString as GeoJSONLineString
|
||||
from datetime import datetime, timedelta
|
||||
import numpy as np
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Trip:
|
||||
|
||||
def __init__(self, trip_id, route_name, headsign, url, calendar, departureTime, path, agency, lastUpdated, stop_times, driver: Driver, additional_ridesharing_info: RidesharingInfo, bbox):
|
||||
if isinstance(calendar, set):
|
||||
self.runs_regularly = True
|
||||
self.weekdays = [
|
||||
1 if Weekday.monday in calendar else 0,
|
||||
1 if Weekday.tuesday in calendar else 0,
|
||||
1 if Weekday.wednesday in calendar else 0,
|
||||
1 if Weekday.thursday in calendar else 0,
|
||||
1 if Weekday.friday in calendar else 0,
|
||||
1 if Weekday.saturday in calendar else 0,
|
||||
1 if Weekday.sunday in calendar else 0,
|
||||
]
|
||||
start_in_day = self._total_seconds(departureTime)
|
||||
else:
|
||||
self.start = datetime.combine(calendar, departureTime)
|
||||
self.runs_regularly = False
|
||||
self.weekdays = [0,0,0,0,0,0,0]
|
||||
|
||||
self.start_time = departureTime
|
||||
self.path = path
|
||||
self.trip_id = trip_id
|
||||
self.url = url
|
||||
self.agency = agency
|
||||
self.stops = []
|
||||
self.lastUpdated = lastUpdated
|
||||
self.stop_times = stop_times
|
||||
self.driver = driver
|
||||
self.additional_ridesharing_info = additional_ridesharing_info
|
||||
self.bbox = bbox
|
||||
self.route_name = route_name
|
||||
self.trip_headsign = headsign
|
||||
|
||||
def path_as_line_string(self):
|
||||
return self.path
|
||||
|
||||
def _total_seconds(self, instant):
|
||||
return instant.hour * 3600 + instant.minute * 60 + instant.second
|
||||
|
||||
def start_time_str(self):
|
||||
return self.start_time.strftime("%H:%M:%S")
|
||||
|
||||
def next_trip_dates(self, start_date, day_count=14):
|
||||
if self.runs_regularly:
|
||||
for single_date in (start_date + timedelta(n) for n in range(day_count)):
|
||||
if self.weekdays[single_date.weekday()]==1:
|
||||
yield single_date.strftime("%Y%m%d")
|
||||
else:
|
||||
yield self.start.strftime("%Y%m%d")
|
||||
|
||||
def route_long_name(self):
|
||||
return self.route_name
|
||||
|
||||
def intersects(self, bbox):
|
||||
return self.bbox.intersects(box(*bbox))
|
||||
|
||||
|
||||
class TripStore():
|
||||
"""
|
||||
TripStore maintains the currently valid trips. A trip is a
|
||||
carpool offer enhanced with all stops this
|
||||
|
||||
Attributes:
|
||||
trips Dict of currently valid trips.
|
||||
deleted_trips Dict of recently deleted trips.
|
||||
"""
|
||||
|
||||
def __init__(self, stops_store):
|
||||
self.transformer = TripTransformer(stops_store)
|
||||
self.stops_store = stops_store
|
||||
self.trips = {}
|
||||
self.deleted_trips = {}
|
||||
self.recent_trips = {}
|
||||
|
||||
|
||||
def put_carpool(self, carpool: Carpool):
|
||||
"""
|
||||
Adds carpool to the TripStore.
|
||||
"""
|
||||
id = "{}:{}".format(carpool.agency, carpool.id)
|
||||
filename = f'data/enhanced/{carpool.agency}/{carpool.id}.json'
|
||||
try:
|
||||
existing_carpool = self._load_carpool_if_exists(carpool.agency, carpool.id)
|
||||
if existing_carpool and existing_carpool.lastUpdated == carpool.lastUpdated:
|
||||
enhanced_carpool = existing_carpool
|
||||
else:
|
||||
if len(carpool.stops) < 2 or self.distance_in_m(carpool) < 1000:
|
||||
logger.warning("Failed to add carpool %s:%s to TripStore, distance too low", carpool.agency, carpool.id)
|
||||
self.handle_failed_carpool_enhancement(carpool)
|
||||
return
|
||||
enhanced_carpool = self.transformer.enhance_carpool(carpool)
|
||||
# TODO should only store enhanced_carpool, if it has 2 or more stops
|
||||
assert_folder_exists(f'data/enhanced/{carpool.agency}/')
|
||||
with open(filename, 'w', encoding='utf-8') as f:
|
||||
f.write(enhanced_carpool.json())
|
||||
logger.info("Added enhanced carpool %s:%s", carpool.agency, carpool.id)
|
||||
|
||||
return self._load_as_trip(enhanced_carpool)
|
||||
except RoutingException as err:
|
||||
logger.warning("Failed to add carpool %s:%s to TripStore due to RoutingException %s", carpool.agency, carpool.id, getattr(err, 'message', repr(err)))
|
||||
self.handle_failed_carpool_enhancement(carpool)
|
||||
except Exception as err:
|
||||
logger.error("Failed to add carpool %s:%s to TripStore.", carpool.agency, carpool.id, exc_info=True)
|
||||
self.handle_failed_carpool_enhancement(carpool)
|
||||
|
||||
def handle_failed_carpool_enhancement(sellf, carpool: Carpool):
|
||||
assert_folder_exists(f'data/failed/{carpool.agency}/')
|
||||
with open(f'data/failed/{carpool.agency}/{carpool.id}.json', 'w', encoding='utf-8') as f:
|
||||
f.write(carpool.json())
|
||||
|
||||
def distance_in_m(self, carpool):
|
||||
if len(carpool.stops) < 2:
|
||||
return 0
|
||||
s1 = carpool.stops[0]
|
||||
s2 = carpool.stops[-1]
|
||||
return geodesic_distance_in_m((s1.lon, s1.lat),(s2.lon, s2.lat))
|
||||
|
||||
def recently_added_trips(self):
|
||||
return list(self.recent_trips.values())
|
||||
|
||||
def recently_deleted_trips(self):
|
||||
return list(self.deleted_trips.values())
|
||||
|
||||
def _load_carpool_if_exists(self, agency_id: str, carpool_id: str):
|
||||
if carpool_exists(agency_id, carpool_id, 'data/enhanced'):
|
||||
try:
|
||||
return load_carpool(agency_id, carpool_id, 'data/enhanced')
|
||||
except Exception as e:
|
||||
# An error on restore could be caused by model changes,
|
||||
# in such a case, it need's to be recreated
|
||||
logger.warning("Could not restore enhanced trip %s:%s, reason: %s", agency_id, carpool_id, repr(e))
|
||||
|
||||
return None
|
||||
|
||||
def _load_as_trip(self, carpool: Carpool):
|
||||
trip = self.transformer.transform_to_trip(carpool)
|
||||
id = trip.trip_id
|
||||
self.trips[id] = trip
|
||||
if not is_older_than_days(carpool.lastUpdated, 1):
|
||||
self.recent_trips[id] = trip
|
||||
logger.debug("Added trip %s", id)
|
||||
|
||||
return trip
|
||||
|
||||
def delete_carpool(self, agency_id: str, carpool_id: str):
|
||||
"""
|
||||
Deletes carpool from the TripStore.
|
||||
"""
|
||||
agencyScopedCarpoolId = f"{agency_id}:{carpool_id}"
|
||||
trip_to_be_deleted = self.trips.get(agencyScopedCarpoolId)
|
||||
if trip_to_be_deleted:
|
||||
self.deleted_trips[agencyScopedCarpoolId] = trip_to_be_deleted
|
||||
del self.trips[agencyScopedCarpoolId]
|
||||
|
||||
if self.recent_trips.get(agencyScopedCarpoolId):
|
||||
del self.recent_trips[agencyScopedCarpoolId]
|
||||
|
||||
if carpool_exists(agency_id, carpool_id):
|
||||
remove_carpool_file(agency_id, carpool_id)
|
||||
|
||||
logger.debug("Deleted trip %s", id)
|
||||
|
||||
def unflag_unrecent_updates(self):
    """
    Trips that were last updated before yesterday, are not recent
    any longer. As no updates need to be sent for them any longer,
    they will be removed from recent_trips and deleted_trips.
    """
    # Both flag dicts are pruned with the same rule.
    for trip_dict in (self.recent_trips, self.deleted_trips):
        for key in list(trip_dict):
            trip = trip_dict.get(key)
            if trip and trip.lastUpdated.date() < yesterday():
                del trip_dict[key]
||||
|
||||
class TripTransformer:
    """Transforms carpool offers into GTFS-style trips.

    Carpool stops are optionally snapped to nearby official transit stops,
    the ride is routed via the routing service, additional ("virtual")
    stops along the route are collected from the stops store, and arrival
    times at those stops are interpolated from the routing instructions.
    """

    REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS = True
    # NOTE(review): constant name (including the 'SERACH' typo) kept for
    # backwards compatibility with possible external users of this class.
    REPLACEMENT_STOPS_SERACH_RADIUS_IN_M = 1000
    # Tolerance (in degrees) used to simplify routed geometries before publishing.
    SIMPLIFY_TOLERANCE = 0.0001

    # Shared routing client for all transformer instances.
    router = RoutingService(config.graphhopper_base_url)

    def __init__(self, stops_store):
        self.stops_store = stops_store

    def transform_to_trip(self, carpool: Carpool):
        """Convert an (already enhanced) carpool into a Trip with stop times and bounding box."""
        stop_times = self._convert_stop_times(carpool)
        route_name = carpool.stops[0].name + " nach " + carpool.stops[-1].name
        headsign = carpool.stops[-1].name
        trip_id = self._trip_id(carpool)
        path = carpool.path
        # Bounding box over all path coordinates; coordinates are (lon, lat)
        # pairs as in GeoJSON, so x=lon, y=lat.
        lons = [pt[0] for pt in path.coordinates]
        lats = [pt[1] for pt in path.coordinates]
        bbox = box(min(lons), min(lats), max(lons), max(lats))

        trip = Trip(trip_id, route_name, headsign, str(carpool.deeplink), carpool.departureDate, carpool.departureTime, carpool.path, carpool.agency, carpool.lastUpdated, stop_times, carpool.driver, carpool.additional_ridesharing_info, bbox)

        return trip

    def _trip_id(self, carpool):
        """Agency-scoped trip id, e.g. 'mfdz:123'."""
        return f"{carpool.agency}:{carpool.id}"

    def _replace_stops_by_transit_stops(self, carpool, max_search_distance):
        """Replace every carpool stop by the closest known transit stop within max_search_distance (meters)."""
        new_stops = []
        for carpool_stop in carpool.stops:
            new_stops.append(self.stops_store.find_closest_stop(carpool_stop, max_search_distance))
        return new_stops

    def enhance_carpool(self, carpool):
        """Route the carpool, simplify its geometry, and add virtual stops with interpolated times."""
        if self.REPLACE_CARPOOL_STOPS_BY_CLOSEST_TRANSIT_STOPS:
            carpool.stops = self._replace_stops_by_transit_stops(carpool, self.REPLACEMENT_STOPS_SERACH_RADIUS_IN_M)

        path = self._path_for_ride(carpool)
        # Simplify the routed geometry before publishing (uses the class-level
        # tolerance instead of a duplicated magic number).
        lineString_shapely_wgs84 = LineString(coordinates=path["points"]["coordinates"]).simplify(self.SIMPLIFY_TOLERANCE)
        lineString_wgs84 = GeoJSONLineString(type="LineString", coordinates=list(lineString_shapely_wgs84.coords))
        virtual_stops = self.stops_store.find_additional_stops_around(lineString_wgs84, carpool.stops)
        if not virtual_stops.empty:
            virtual_stops["time"] = self._estimate_times(path, virtual_stops['distance'])
            logger.debug("Virtual stops found: {}".format(virtual_stops))
        if len(virtual_stops) > MAX_STOPS_PER_TRIP:
            # In case we found more than MAX_STOPS_PER_TRIP, retain the first
            # and last half of MAX_STOPS_PER_TRIP stops.
            # BUG FIX: np.r_[0:k, k:] used an open-ended slice (invalid for
            # np.r_) and would at best have selected every row; select the
            # first `half` and the last `half` rows explicitly instead.
            half = int(MAX_STOPS_PER_TRIP / 2)
            row_count = len(virtual_stops)
            virtual_stops = virtual_stops.iloc[np.r_[0:half, row_count - half:row_count]]

        trip_id = f"{carpool.agency}:{carpool.id}"
        stop_times = self._stops_and_stop_times(carpool.departureTime, trip_id, virtual_stops)

        enhanced_carpool = carpool.copy()
        enhanced_carpool.stops = stop_times
        enhanced_carpool.path = lineString_wgs84
        return enhanced_carpool

    def _convert_stop_times(self, carpool):
        """Build GtfsStopTime records for the carpool's stops (sequence numbers start at 1)."""
        stop_times = [GtfsStopTime(
            self._trip_id(carpool),
            stop.arrivalTime,
            stop.departureTime,
            stop.id,
            seq_nr + 1,
            # dropoff-only stops allow no pickup, pickup-only stops no dropoff
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_dropoff else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_STOP_TYPE_NONE if stop.pickup_dropoff == PickupDropoffType.only_pickup else STOP_TIMES_STOP_TYPE_COORDINATE_DRIVER,
            STOP_TIMES_TIMEPOINT_APPROXIMATE)
            for seq_nr, stop in enumerate(carpool.stops)]
        return stop_times

    def _path_for_ride(self, carpool):
        """Route the ride along all announced stops via the routing service."""
        points = self._stop_coords(carpool.stops)
        return self.router.path_for_stops(points)

    def _stop_coords(self, stops):
        # Retrieve coordinates of all officially announced stops (start, intermediate, target)
        return [Point(stop.lon, stop.lat) for stop in stops]

    def _estimate_times(self, path, distances_from_start):
        """Interpolate arrival times for the given distances from start.

        Walks the routing instructions (each carries a distance and a time,
        presumably meters/milliseconds as in _stops_and_stop_times — confirm
        against the routing service) and linearly interpolates within the
        instruction a requested distance falls into. Distances beyond the
        route's total length get the maximum cumulated time.

        Raises RoutingException if an instruction has zero distance.
        """
        cumulated_distance = 0
        cumulated_time = 0
        stop_times = []
        instructions = path["instructions"]

        # Removed unused locals instr_distance/instr_time from the original.
        cnt = 0
        for distance in distances_from_start:
            while cnt < len(instructions) and cumulated_distance + instructions[cnt]["distance"] < distance:
                cumulated_distance = cumulated_distance + instructions[cnt]["distance"]
                cumulated_time = cumulated_time + instructions[cnt]["time"]
                cnt = cnt + 1

            if cnt < len(instructions):
                if instructions[cnt]["distance"] == 0:
                    raise RoutingException("Origin and destination too close")
                percent_dist = (distance - cumulated_distance) / instructions[cnt]["distance"]
                stop_time = cumulated_time + percent_dist * instructions[cnt]["time"]
                stop_times.append(stop_time)
            else:
                logger.debug("distance {} exceeds total length {}, using max arrival time {}".format(distance, cumulated_distance, cumulated_time))
                stop_times.append(cumulated_time)
        return stop_times

    def _stops_and_stop_times(self, start_time, trip_id, stops_frame):
        """Convert the virtual-stops frame into StopTime records, skipping near-duplicate stops.

        # Assumptions:
        # arrival_time = departure_time
        # pickup_type, drop_off_type for origin: = coordinate/none
        # pickup_type, drop_off_type for destination: = none/coordinate
        # timepoint = approximate for origin and destination (not sure what consequences this might have for trip planners)
        """
        number_of_stops = len(stops_frame.index)
        total_distance = stops_frame.iloc[number_of_stops - 1]["distance"]

        first_stop_time = GtfsTimeDelta(hours=start_time.hour, minutes=start_time.minute, seconds=start_time.second)
        stop_times = []
        seq_nr = 0
        for i in range(0, number_of_stops):
            current_stop = stops_frame.iloc[i]

            if not current_stop.id:
                continue
            elif i == 0:
                if (stops_frame.iloc[1].time - current_stop.time) < 1000:
                    # skip custom stop if there is an official stop very close by
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            else:
                if (current_stop.time - stops_frame.iloc[i - 1].time) < 5000 and not i == 1 and not is_carpooling_stop(current_stop.id, current_stop.stop_name):
                    # skip latter stop if it's very close (<5 seconds drive) by the preceding
                    logger.debug("Skipped stop %s", current_stop.id)
                    continue
            # `time` is in milliseconds (see timedelta below).
            trip_time = timedelta(milliseconds=int(current_stop.time))
            is_dropoff = self._is_dropoff_stop(current_stop, total_distance)
            is_pickup = self._is_pickup_stop(current_stop, total_distance)
            # TODO would be nice if possible to publish a minimum shared distance
            # (removed unused pickup_type/dropoff_type locals; StopTime only
            # consumes the combined pickup_dropoff value)

            if is_pickup and not is_dropoff:
                pickup_dropoff = PickupDropoffType.only_pickup
            elif not is_pickup and is_dropoff:
                pickup_dropoff = PickupDropoffType.only_dropoff
            else:
                pickup_dropoff = PickupDropoffType.pickup_and_dropoff

            next_stop_time = first_stop_time + trip_time
            seq_nr += 1
            stop_times.append(StopTime(**{
                'arrivalTime': str(next_stop_time),
                'departureTime': str(next_stop_time),
                'id': current_stop.id,
                'pickup_dropoff': pickup_dropoff,
                'name': str(current_stop.stop_name),
                'lat': current_stop.y,
                'lon': current_stop.x
            }))

        return stop_times

    def _is_dropoff_stop(self, current_stop, total_distance):
        # Stops in the second half of the route are dropoff stops.
        return current_stop["distance"] >= 0.5 * total_distance

    def _is_pickup_stop(self, current_stop, total_distance):
        # Stops in the first half of the route are pickup stops.
        return current_stop["distance"] < 0.5 * total_distance
||||
def load_carpool(agency_id: str, carpool_id: str, folder: str = 'data/enhanced') -> Carpool:
    """Load and deserialize a carpool from '{folder}/{agency_id}/{carpool_id}.json'.

    Raises FileNotFoundError if the file does not exist; may raise a model
    validation error if the stored JSON no longer matches the Carpool model.
    """
    with open(f'{folder}/{agency_id}/{carpool_id}.json', 'r', encoding='utf-8') as f:
        # Renamed from `dict`, which shadowed the builtin.
        carpool_dict = json.load(f)
    carpool = Carpool(**carpool_dict)
    return carpool
|
||||
def carpool_exists(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Check whether a persisted carpool file exists for the given agency/id."""
    carpool_file_path = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.path.exists(carpool_file_path)
|
||||
def remove_carpool_file(agency_id: str, carpool_id: str, folder: str = 'data/enhanced'):
    """Delete the persisted carpool file; raises FileNotFoundError if it does not exist."""
    carpool_file_path = f"{folder}/{agency_id}/{carpool_id}.json"
    return os.remove(carpool_file_path)
||||
|
|
@ -1,24 +0,0 @@
|
|||
from amarillo.plugins.enhancer.services import stops
|
||||
from amarillo.models.Carpool import StopTime
|
||||
|
||||
def test_load_stops_from_file():
    """Stops can be loaded from a local CSV stop source."""
    sources = [{"url": "amarillo/plugins/enhancer/tests/stops.csv", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    loaded = store.stopsDataFrames[0]['stops']
    assert len(loaded) > 0
|
||||
def test_load_csv_stops_from_web_():
    """Stops can be loaded from a remote CSV stop source."""
    sources = [{"url": "https://data.mfdz.de/mfdz/stops/custom.csv", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    loaded = store.stopsDataFrames[0]['stops']
    assert len(loaded) > 0
|
||||
def test_load_geojson_stops_from_web_():
    """Stops can be loaded from a remote GeoJSON stop source."""
    sources = [{"url": "https://datahub.bbnavi.de/export/rideshare_points.geojson", "vicinity": 50}]
    store = stops.StopsStore(sources)
    store.load_stop_sources()
    loaded = store.stopsDataFrames[0]['stops']
    assert len(loaded) > 0
|
||||
def test_find_closest_stop():
    """The closest known stop within the search radius is returned for a coordinate."""
    store = stops.StopsStore([{"url": "amarillo/plugins/enhancer/tests/stops.csv", "vicinity": 50}])
    store.load_stop_sources()
    probe = StopTime(name="start", lat=53.1191, lon=14.01577)
    closest = store.find_closest_stop(probe, 1000)
    assert closest.name == 'Mitfahrbank Biesenbrow'
|
|
@ -1,23 +0,0 @@
|
|||
from amarillo.tests.sampledata import cp1, carpool_repeating
|
||||
from amarillo.plugins.enhancer.services.trips import TripStore
|
||||
from amarillo.plugins.enhancer.services.stops import StopsStore
|
||||
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def test_trip_store_put_one_time_carpool():
    """Putting a one-time carpool yields a trip with the expected first/last stops."""
    trip_store = TripStore(StopsStore())

    t = trip_store.put_carpool(cp1)
    # Idiom fix: identity check against None instead of `!= None`.
    assert t is not None
    assert len(t.stop_times) >= 2
    assert t.stop_times[0].stop_id == 'mfdz:12073:001'
    assert t.stop_times[-1].stop_id == 'de:12073:900340137::3'
|
||||
def test_trip_store_put_repeating_carpool():
    """Putting a repeating carpool yields a trip with at least two stop times."""
    trip_store = TripStore(StopsStore())

    t = trip_store.put_carpool(carpool_repeating)
    # Idiom fix: identity check against None instead of `!= None`.
    assert t is not None
    assert len(t.stop_times) >= 2
||||
28
logging.conf
Normal file
28
logging.conf
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
[loggers]
|
||||
keys=root
|
||||
|
||||
[handlers]
|
||||
keys=consoleHandler, fileHandler
|
||||
|
||||
[formatters]
|
||||
keys=simpleFormatter
|
||||
|
||||
[logger_root]
|
||||
level=INFO
|
||||
handlers=consoleHandler, fileHandler
|
||||
propagate=yes
|
||||
|
||||
[handler_consoleHandler]
|
||||
class=StreamHandler
|
||||
level=DEBUG
|
||||
formatter=simpleFormatter
|
||||
args=(sys.stdout,)
|
||||
|
||||
[handler_fileHandler]
|
||||
class=handlers.RotatingFileHandler
|
||||
level=ERROR
|
||||
formatter=simpleFormatter
|
||||
args=('error.log', 'a', 1000000, 3) # Filename, mode, maxBytes, backupCount
|
||||
|
||||
[formatter_simpleFormatter]
|
||||
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
|
||||
Loading…
Reference in a new issue