Call separate gtfs-generator
This commit is contained in:
parent
1218098ca5
commit
ec5720af1d
8
amarillo/plugins/gtfs_export/config.py
Normal file
8
amarillo/plugins/gtfs_export/config.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
from pydantic import ConfigDict
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
class Config(BaseSettings):
    """Settings for the gtfs_export plugin.

    Values may come from the `config` env file or the process environment;
    unknown keys are tolerated (`extra='allow'`) so the file can be shared
    with other plugins.
    """

    # Base URL of the external gtfs-generator service this plugin proxies.
    generator_url: str = 'http://localhost:8002'

    model_config = ConfigDict(extra='allow')


# Module-level singleton, populated from the project-level `config` file.
config = Config(_env_file='config', _env_file_encoding='utf-8')
|
||||
|
|
@ -1,79 +1,5 @@
|
|||
from fastapi import FastAPI
|
||||
|
||||
from amarillo.models.Carpool import Region
|
||||
from amarillo.plugins.gtfs_export.gtfs_export import GtfsExport, GtfsFeedInfo, GtfsAgency
|
||||
from amarillo.plugins.gtfs_export.gtfs import GtfsRtProducer
|
||||
from amarillo.utils.container import container
|
||||
from amarillo.plugins.gtfs_export.router import router
|
||||
from amarillo.plugins.enhancer.configuration import configure_enhancer_services
|
||||
from glob import glob
|
||||
import json
|
||||
import schedule
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Region definitions, keyed by region id, loaded once at import time
# from conf/region/*.json.
regions = {}
for region_file_name in glob('conf/region/*.json'):
    with open(region_file_name) as region_file:
        # Renamed from `dict`, which shadowed the builtin.
        region_dict = json.load(region_file)
        region = Region(**region_dict)
        regions[region.id] = region

# GTFS agencies, loaded once at import time from conf/agency/*.json.
agencies = []
for agency_file_name in glob('conf/agency/*.json'):
    with open(agency_file_name) as agency_file:
        agency_dict = json.load(agency_file)
        agency = GtfsAgency(
            agency_dict["id"],
            agency_dict["name"],
            agency_dict["url"],
            agency_dict["timezone"],
            agency_dict["lang"],
            agency_dict["email"])
        agencies.append(agency)
|
||||
|
||||
def run_schedule():
    """Run pending scheduled jobs forever, polling once per second.

    Exceptions raised by a job are logged and swallowed so that a single
    failing job does not kill the scheduler thread.
    """
    while True:  # idiomatic replacement for `while 1`
        try:
            schedule.run_pending()
        except Exception as e:
            logger.exception(e)
        time.sleep(1)
|
||||
|
||||
def midnight():
    """Nightly maintenance: refresh stop sources, age out stale trips and
    outdated offers, then rebuild the static GTFS feeds."""
    stops_store = container['stops_store']
    trips_store = container['trips_store']
    carpools = container['carpools']

    stops_store.load_stop_sources()
    trips_store.unflag_unrecent_updates()
    carpools.purge_outdated_offers()
    generate_gtfs()
|
||||
|
||||
def generate_gtfs():
    """Export one static GTFS zip per configured region to data/gtfs/."""
    logger.info("Generate GTFS")

    # TODO make feed producer infos configurable
    # feed_info is identical for every region, so build it once instead of
    # re-creating it on each loop iteration.
    feed_info = GtfsFeedInfo('mfdz', 'MITFAHR|DE|ZENTRALE', 'http://www.mitfahrdezentrale.de', 'de', 1)

    for region in regions.values():
        exporter = GtfsExport(
            agencies,
            feed_info,
            container['trips_store'],
            container['stops_store'],
            region.bbox)
        exporter.export(f"data/gtfs/amarillo.{region.id}.gtfs.zip", "data/tmp/")
|
||||
|
||||
def generate_gtfs_rt():
    """Export a GTFS-RT feed per configured region to data/gtfs/."""
    logger.info("Generate GTFS-RT")
    producer = GtfsRtProducer(container['trips_store'])
    for region in regions.values():
        # export_feed writes the feed files itself; the previously captured
        # return value (`rt`) was never used, so it is dropped here.
        producer.export_feed(time.time(), f"data/gtfs/amarillo.{region.id}.gtfsrt", bbox=region.bbox)
|
||||
|
||||
def start_schedule():
    """Register periodic feed-generation jobs and start a daemon worker
    thread that services them."""
    # Nightly maintenance, plus a fresh GTFS-RT feed every minute.
    schedule.every().day.at("00:00").do(midnight)
    schedule.every(60).seconds.do(generate_gtfs_rt)
    # Create all feeds once at startup
    schedule.run_all()
    worker = threading.Thread(target=run_schedule, daemon=True)
    worker.start()
|
||||
|
||||
def setup(app: FastAPI):
    """Plugin entry point: configure enhancer services, mount the export
    router on *app*, and start the background schedule."""
    configure_enhancer_services()
    app.include_router(router)
    start_schedule()
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, HTTPException, status, Depends
|
||||
import requests
|
||||
import os
|
||||
from datetime import datetime, date, timedelta
|
||||
from fastapi import APIRouter, HTTPException, Response, status, Depends
|
||||
|
||||
from amarillo.models.Carpool import Region
|
||||
from amarillo.services.regions import RegionService
|
||||
|
|
@ -8,18 +10,12 @@ from amarillo.services.oauth2 import get_current_user, verify_permission
|
|||
from amarillo.models.User import User
|
||||
from amarillo.utils.container import container
|
||||
from fastapi.responses import FileResponse
|
||||
from .config import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/export")
|
||||
async def trigger_export(requesting_user: User = Depends(get_current_user)):
|
||||
verify_permission("admin", requesting_user)
|
||||
#import is here to avoid circular import
|
||||
from amarillo.plugins.gtfs_export.gtfs_generator import generate_gtfs
|
||||
generate_gtfs()
|
||||
|
||||
#TODO: move to amarillo/utils?
|
||||
def _assert_region_exists(region_id: str) -> Region:
|
||||
regions: RegionService = container['regions']
|
||||
|
|
@ -33,6 +29,11 @@ def _assert_region_exists(region_id: str) -> Region:
|
|||
|
||||
return region
|
||||
|
||||
def is_cached(path: str) -> bool:
    """Return True if *path* exists and was last modified today.

    Cached feed files are treated as fresh for the calendar day they were
    written on; a missing or older file means the feed must be fetched again.
    """
    if not os.path.isfile(path):
        return False

    timestamp = os.path.getmtime(path)
    return datetime.fromtimestamp(timestamp).date() == date.today()
|
||||
|
||||
@router.get("/region/{region_id}/gtfs",
|
||||
summary="Return GTFS Feed for this region",
|
||||
|
|
@ -43,8 +44,20 @@ def _assert_region_exists(region_id: str) -> Region:
|
|||
}
|
||||
)
|
||||
async def get_file(region_id: str, requesting_user: User = Depends(get_current_user)):
    """Return the GTFS zip for *region_id*.

    Serves a locally cached file if it was written today; otherwise fetches
    a fresh feed from the external gtfs-generator service, caches it, and
    streams it back.
    """
    verify_permission("gtfs", requesting_user)
    _assert_region_exists(region_id)
    file_path = f'data/gtfs/amarillo.{region_id}.gtfs.zip'
    if is_cached(file_path):
        logger.info("Returning cached response")
        return FileResponse(file_path)

    logger.info("Returning new response")
    response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs/")
    # Only cache successful responses: without this check an error body from
    # the generator would be written to disk and served as the feed until
    # the cache expires.
    if response.status_code != 200:
        raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY,
                            detail="GTFS generator request failed")
    # cache response
    with open(file_path, "wb") as cache_file:
        cache_file.write(response.content)

    return Response(content=response.content, media_type="application/zip")
|
||||
|
||||
@router.get("/region/{region_id}/gtfs-rt",
|
||||
summary="Return GTFS-RT Feed for this region",
|
||||
|
|
@ -56,11 +69,24 @@ async def get_file(region_id: str, requesting_user: User = Depends(get_current_u
|
|||
}
|
||||
)
|
||||
async def get_file(region_id: str, format: str = 'protobuf', requesting_user: User = Depends(get_current_user)):
    """Return the GTFS-RT feed for *region_id* as protobuf (default) or json.

    Serves a locally cached file if it was written today; otherwise fetches
    a fresh feed from the external gtfs-generator service, caches it, and
    streams it back. Raises 400 for an unsupported *format*.
    """
    verify_permission("gtfs", requesting_user)
    _assert_region_exists(region_id)
    if format == 'json':
        file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.json'
    elif format == 'protobuf':
        file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf'
    else:
        message = "Specified format is not supported, i.e. neither protobuf nor json."
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

    if is_cached(file_path):
        logger.info("Returning cached response")
        return FileResponse(file_path)

    logger.info("Returning new response")
    response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs-rt/?format={format}")
    # Only cache successful responses: without this check an error body from
    # the generator would be written to disk and served as the feed until
    # the cache expires.
    if response.status_code != 200:
        raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY,
                            detail="GTFS-RT generator request failed")
    # cache response
    with open(file_path, "wb") as cache_file:
        cache_file.write(response.content)

    return Response(content=response.content)
|
||||
|
|
@ -1,10 +1,8 @@
|
|||
[project]
|
||||
name = "amarillo-gtfs-exporter"
|
||||
version = "0.0.1"
|
||||
version = "0.0.2"
|
||||
dependencies = [
|
||||
"amarillo",
|
||||
"amarillo-enhancer",
|
||||
"schedule==1.2.1",
|
||||
"amarillo"
|
||||
]
|
||||
|
||||
[tool.setuptools.packages]
|
||||
|
|
|
|||
Loading…
Reference in a new issue