Compare commits

..

10 commits

Author SHA1 Message Date
Csaba 2c10a1d676 Removed logging.conf 2024-03-08 15:22:35 +01:00
Csaba 11a6a3bbfa Added route_color and route_text_color 2024-03-08 15:13:05 +01:00
Csaba 09312b4791 Fixed leading '/' in router.py 2024-03-08 15:13:05 +01:00
Csaba abfaa053c9 test_gtfs.py 2024-03-08 15:13:05 +01:00
Csaba dd217654f7 /export endpoint 2024-03-08 15:13:05 +01:00
Csaba 993ec9fdce call configure_enhancer_services 2024-03-08 15:13:05 +01:00
Csaba a46b83567e update .gitignore 2024-03-08 15:13:05 +01:00
Csaba f494f7cccb setup function and router 2024-03-08 15:13:05 +01:00
Csaba 2aaf10a095 added GTFS export code from enhancer 2024-03-08 15:13:05 +01:00
Csaba 0f991371ba Initial commit 2024-03-08 15:13:05 +01:00
5 changed files with 95 additions and 113 deletions

48
Jenkinsfile vendored
View file

@@ -1,48 +0,0 @@
pipeline {
agent { label 'builtin' }
environment {
GITEA_CREDS = credentials('AMARILLO-JENKINS-GITEA-USER')
PYPI_CREDS = credentials('AMARILLO-JENKINS-PYPI-USER')
TWINE_REPO_URL = "https://git.gerhardt.io/api/packages/amarillo/pypi"
}
stages {
stage('Create virtual environment') {
steps {
echo 'Creating virtual environment'
sh '''python3 -m venv .venv
. .venv/bin/activate'''
}
}
stage('Installing requirements') {
steps {
echo 'Installing packages'
sh 'python3 -m pip install -r requirements.txt'
sh 'python3 -m pip install --upgrade build'
sh 'python3 -m pip install --upgrade twine'
}
}
stage('Build') {
steps {
echo 'Cleaning up dist directory'
dir("dist") {
deleteDir()
}
echo 'Building package'
sh 'python3 -m build'
}
}
stage('Publish package to GI') {
steps {
sh 'python3 -m twine upload --skip-existing --verbose --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*'
}
}
stage('Publish package to PyPI') {
when {
branch 'main'
}
steps {
sh 'python3 -m twine upload --verbose --username $PYPI_CREDS_USR --password $PYPI_CREDS_PSW ./dist/*'
}
}
}
}

View file

@@ -1,8 +0,0 @@
from pydantic import ConfigDict
from pydantic_settings import BaseSettings
class Config(BaseSettings):
generator_url: str = 'http://localhost:8002'
model_config = ConfigDict(extra='allow')
config = Config(_env_file='config', _env_file_encoding='utf-8')

View file

@@ -1,5 +1,79 @@
from fastapi import FastAPI
from amarillo.models.Carpool import Region
from amarillo.plugins.gtfs_export.gtfs_export import GtfsExport, GtfsFeedInfo, GtfsAgency
from amarillo.plugins.gtfs_export.gtfs import GtfsRtProducer
from amarillo.utils.container import container
from amarillo.plugins.gtfs_export.router import router
from amarillo.plugins.enhancer.configuration import configure_enhancer_services
from glob import glob
import json
import schedule
import threading
import time
import logging
logger = logging.getLogger(__name__)
regions = {}
for region_file_name in glob('conf/region/*.json'):
with open(region_file_name) as region_file:
dict = json.load(region_file)
region = Region(**dict)
region_id = region.id
regions[region_id] = region
agencies = []
for agency_file_name in glob('conf/agency/*.json'):
with open(agency_file_name) as agency_file:
dict = json.load(agency_file)
agency = GtfsAgency(dict["id"], dict["name"], dict["url"], dict["timezone"], dict["lang"], dict["email"])
agency_id = agency.agency_id
agencies.append(agency)
def run_schedule():
while 1:
try:
schedule.run_pending()
except Exception as e:
logger.exception(e)
time.sleep(1)
def midnight():
container['stops_store'].load_stop_sources()
container['trips_store'].unflag_unrecent_updates()
container['carpools'].purge_outdated_offers()
generate_gtfs()
def generate_gtfs():
logger.info("Generate GTFS")
for region in regions.values():
# TODO make feed producer infos configurable
feed_info = GtfsFeedInfo('mfdz', 'MITFAHR|DE|ZENTRALE', 'http://www.mitfahrdezentrale.de', 'de', 1)
exporter = GtfsExport(
agencies,
feed_info,
container['trips_store'],
container['stops_store'],
region.bbox)
exporter.export(f"data/gtfs/amarillo.{region.id}.gtfs.zip", "data/tmp/")
def generate_gtfs_rt():
logger.info("Generate GTFS-RT")
producer = GtfsRtProducer(container['trips_store'])
for region in regions.values():
rt = producer.export_feed(time.time(), f"data/gtfs/amarillo.{region.id}.gtfsrt", bbox=region.bbox)
def start_schedule():
schedule.every().day.at("00:00").do(midnight)
schedule.every(60).seconds.do(generate_gtfs_rt)
# Create all feeds once at startup
schedule.run_all()
job_thread = threading.Thread(target=run_schedule, daemon=True)
job_thread.start()
def setup(app : FastAPI):
app.include_router(router)
configure_enhancer_services()
app.include_router(router)
start_schedule()

View file

@@ -1,21 +1,23 @@
import logging
import requests
import os
from datetime import datetime, date, timedelta
from fastapi import APIRouter, HTTPException, Response, status, Depends
from fastapi import APIRouter, HTTPException, status, Depends
from amarillo.models.Carpool import Region
from amarillo.routers.agencyconf import verify_admin_api_key
from amarillo.services.regions import RegionService
from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.models.User import User
from amarillo.utils.container import container
from fastapi.responses import FileResponse
from .config import config
logger = logging.getLogger(__name__)
router = APIRouter()
@router.post("/export")
async def post_agency_conf(admin_api_key: str = Depends(verify_admin_api_key)):
#import is here to avoid circular import
from amarillo.plugins.gtfs_export.gtfs_generator import generate_gtfs
generate_gtfs()
#TODO: move to amarillo/utils?
def _assert_region_exists(region_id: str) -> Region:
regions: RegionService = container['regions']
@@ -29,19 +31,6 @@ def _assert_region_exists(region_id: str) -> Region:
return region
# File on disk is from the today
def is_cached_day(path : str):
if not os.path.isfile(path): return False
timestamp = os.path.getmtime(path)
return datetime.fromtimestamp(timestamp).date() == date.today()
# File on disk is from the last minute
def is_cached_1m(path : str):
if not os.path.isfile(path): return False
timestamp = os.path.getmtime(path)
return datetime.now() - datetime.fromtimestamp(timestamp) < timedelta(minutes=1)
@router.get("/region/{region_id}/gtfs",
summary="Return GTFS Feed for this region",
@@ -51,22 +40,9 @@ def is_cached_1m(path : str):
status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
}
)
async def get_file(region_id: str, requesting_user: User = Depends(get_current_user)):
verify_permission("gtfs", requesting_user)
async def get_file(region_id: str, user: str = Depends(verify_admin_api_key)):
_assert_region_exists(region_id)
file_path = f'data/gtfs/amarillo.{region_id}.gtfs.zip'
if is_cached_day(file_path):
# logger.info("Returning cached response")
return FileResponse(file_path)
# logger.info("Returning new response")
response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs/")
# cache response
if response.status_code == 200:
with open(file_path, "wb") as file:
file.write(response.content)
return Response(content=response.content, media_type="application/zip")
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip')
@router.get("/region/{region_id}/gtfs-rt",
summary="Return GTFS-RT Feed for this region",
@@ -77,26 +53,12 @@ async def get_file(region_id: str, requesting_user: User = Depends(get_current_u
status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
}
)
async def get_file(region_id: str, format: str = 'protobuf', requesting_user: User = Depends(get_current_user)):
verify_permission("gtfs", requesting_user)
async def get_file(region_id: str, format: str = 'protobuf', user: str = Depends(verify_admin_api_key)):
_assert_region_exists(region_id)
if format == 'json':
file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.json'
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json')
elif format == 'protobuf':
file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf'
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf')
else:
message = "Specified format is not supported, i.e. neither protobuf nor json."
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
if is_cached_1m(file_path):
# logger.info("Returning cached response")
return FileResponse(file_path)
# logger.info("Returning new response")
response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs-rt/?format={format}")
# cache response
if response.status_code == 200:
with open(file_path, "wb") as file:
file.write(response.content)
return Response(content=response.content)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

View file

@@ -1,8 +1,10 @@
[project]
name = "amarillo-gtfs-exporter"
version = "0.0.4"
name = "amarillo-gtfs-export"
version = "0.0.1"
dependencies = [
"amarillo"
"amarillo",
"amarillo-enhancer",
"schedule==1.2.1",
]
[tool.setuptools.packages]