Compare commits

..

18 commits

Author SHA1 Message Date
Csaba 651b887d86 Use built-in node
All checks were successful
Amarillo/amarillo-gitea/amarillo-gtfs-export/pipeline/head This commit looks good
2024-06-11 15:50:18 +02:00
Csaba 1b6a86d5fd Use python for venv command rather than python3
Some checks failed
Amarillo/amarillo-gitea/amarillo-gtfs-export/pipeline/head There was a failure building this commit
2024-06-11 15:25:53 +02:00
Csaba a7311b9152 Only cache 200 responses
Some checks failed
Amarillo/amarillo-gitea/amarillo-gtfs-export/pipeline/head There was a failure building this commit
2024-05-23 13:55:09 +02:00
Csaba eba7967c90 Cache GTFS-RT for 1 minute 2024-05-17 15:08:07 +02:00
Csaba ec5720af1d Call separate gtfs-generator 2024-05-17 14:20:57 +02:00
Csaba 1218098ca5 Use get_current_user 2024-04-22 13:11:42 +02:00
Csaba 40ff3354d0 Changed project name to amarillo-gtfs-exporter 2024-03-27 11:52:16 +01:00
frsaba 075c5c63f0
Added Jenkinsfile (#2)
* Initial commit

* added GTFS export code from enhancer

* setup function and router

* update .gitignore

* call configure_enhancer_services

* /export endpoint

* test_gtfs.py

* Fixed leading '/' in router.py

* Added route_color and route_text_color

* Removed logging.conf

* Added Jenkinsfile

* Publish to PyPI

---------

Co-authored-by: Francia Csaba <cf@gerhardt.io>
2024-03-21 15:45:34 +01:00
Csaba dc81db9c68 Removed logging.conf 2024-03-08 15:25:53 +01:00
Csaba b59682bb74 Added route_color and route_text_color 2024-03-08 15:25:53 +01:00
Csaba 84466ec612 Fixed leading '/' in router.py 2024-03-08 15:25:53 +01:00
Csaba 68f19aa56f test_gtfs.py 2024-03-08 15:25:53 +01:00
Csaba 3fdd340cd6 /export endpoint 2024-03-08 15:25:53 +01:00
Csaba 7d2620f056 call configure_enhancer_services 2024-03-08 15:25:53 +01:00
Csaba e6316fc212 update .gitignore 2024-03-08 15:25:53 +01:00
Csaba 94a51a7a7f setup function and router 2024-03-08 15:25:53 +01:00
Csaba 8148df41e8 added GTFS export code from enhancer 2024-03-08 15:25:53 +01:00
Csaba fbb3c0f6d8 Initial commit 2024-03-08 15:25:53 +01:00
5 changed files with 113 additions and 95 deletions

48
Jenkinsfile vendored Normal file
View file

@@ -0,0 +1,48 @@
// Jenkins declarative pipeline: build the amarillo-gtfs-export package,
// publish it to the Gitea package registry on every build, and to PyPI
// only on the main branch.
pipeline {
    agent { label 'builtin' }
    environment {
        // credentials() bindings expose ..._USR and ..._PSW shell variables.
        GITEA_CREDS = credentials('AMARILLO-JENKINS-GITEA-USER')
        PYPI_CREDS = credentials('AMARILLO-JENKINS-PYPI-USER')
        TWINE_REPO_URL = "https://git.gerhardt.io/api/packages/amarillo/pypi"
        // Every `sh` step runs in a fresh shell, so sourcing
        // `.venv/bin/activate` in one step does NOT carry over to the next.
        // Instead, call the venv's interpreter explicitly everywhere.
        VENV_PY = "${WORKSPACE}/.venv/bin/python"
    }
    stages {
        stage('Create virtual environment') {
            steps {
                echo 'Creating virtual environment'
                sh 'python3 -m venv .venv'
            }
        }
        stage('Installing requirements') {
            steps {
                echo 'Installing packages'
                // Use the venv interpreter so packages land in .venv,
                // not in the system site-packages.
                sh '$VENV_PY -m pip install -r requirements.txt'
                sh '$VENV_PY -m pip install --upgrade build'
                sh '$VENV_PY -m pip install --upgrade twine'
            }
        }
        stage('Build') {
            steps {
                echo 'Cleaning up dist directory'
                // Remove stale artifacts so twine only uploads this build.
                dir("dist") {
                    deleteDir()
                }
                echo 'Building package'
                sh '$VENV_PY -m build'
            }
        }
        stage('Publish package to GI') {
            steps {
                // --skip-existing: re-runs of an already-published version
                // must not fail the pipeline.
                sh '$VENV_PY -m twine upload --skip-existing --verbose --repository-url $TWINE_REPO_URL --username $GITEA_CREDS_USR --password $GITEA_CREDS_PSW ./dist/*'
            }
        }
        stage('Publish package to PyPI') {
            when {
                branch 'main'
            }
            steps {
                sh '$VENV_PY -m twine upload --verbose --username $PYPI_CREDS_USR --password $PYPI_CREDS_PSW ./dist/*'
            }
        }
    }
}

View file

@@ -0,0 +1,8 @@
from pydantic import ConfigDict
from pydantic_settings import BaseSettings, SettingsConfigDict


class Config(BaseSettings):
    """Settings for the gtfs-export plugin.

    Values may be overridden by environment variables or by the
    ``config`` env file loaded below.
    """

    # Base URL of the separate amarillo-gtfs-generator service that
    # produces the GTFS/GTFS-RT feeds this plugin proxies and caches.
    generator_url: str = 'http://localhost:8002'

    # SettingsConfigDict (not plain pydantic.ConfigDict) is the documented
    # config type for BaseSettings subclasses; extra='allow' lets the shared
    # `config` file carry keys belonging to other amarillo plugins.
    model_config = SettingsConfigDict(extra='allow')


# Module-level singleton: reads the `config` file at import time; a missing
# file is tolerated (all fields have defaults).
config = Config(_env_file='config', _env_file_encoding='utf-8')

View file

@@ -1,79 +1,5 @@
from fastapi import FastAPI from fastapi import FastAPI
from amarillo.models.Carpool import Region
from amarillo.plugins.gtfs_export.gtfs_export import GtfsExport, GtfsFeedInfo, GtfsAgency
from amarillo.plugins.gtfs_export.gtfs import GtfsRtProducer
from amarillo.utils.container import container
from amarillo.plugins.gtfs_export.router import router from amarillo.plugins.gtfs_export.router import router
from amarillo.plugins.enhancer.configuration import configure_enhancer_services
from glob import glob
import json
import schedule
import threading
import time
import logging
logger = logging.getLogger(__name__)
regions = {}
for region_file_name in glob('conf/region/*.json'):
with open(region_file_name) as region_file:
dict = json.load(region_file)
region = Region(**dict)
region_id = region.id
regions[region_id] = region
agencies = []
for agency_file_name in glob('conf/agency/*.json'):
with open(agency_file_name) as agency_file:
dict = json.load(agency_file)
agency = GtfsAgency(dict["id"], dict["name"], dict["url"], dict["timezone"], dict["lang"], dict["email"])
agency_id = agency.agency_id
agencies.append(agency)
def run_schedule():
while 1:
try:
schedule.run_pending()
except Exception as e:
logger.exception(e)
time.sleep(1)
def midnight():
container['stops_store'].load_stop_sources()
container['trips_store'].unflag_unrecent_updates()
container['carpools'].purge_outdated_offers()
generate_gtfs()
def generate_gtfs():
logger.info("Generate GTFS")
for region in regions.values():
# TODO make feed producer infos configurable
feed_info = GtfsFeedInfo('mfdz', 'MITFAHR|DE|ZENTRALE', 'http://www.mitfahrdezentrale.de', 'de', 1)
exporter = GtfsExport(
agencies,
feed_info,
container['trips_store'],
container['stops_store'],
region.bbox)
exporter.export(f"data/gtfs/amarillo.{region.id}.gtfs.zip", "data/tmp/")
def generate_gtfs_rt():
logger.info("Generate GTFS-RT")
producer = GtfsRtProducer(container['trips_store'])
for region in regions.values():
rt = producer.export_feed(time.time(), f"data/gtfs/amarillo.{region.id}.gtfsrt", bbox=region.bbox)
def start_schedule():
schedule.every().day.at("00:00").do(midnight)
schedule.every(60).seconds.do(generate_gtfs_rt)
# Create all feeds once at startup
schedule.run_all()
job_thread = threading.Thread(target=run_schedule, daemon=True)
job_thread.start()
def setup(app : FastAPI): def setup(app : FastAPI):
configure_enhancer_services() app.include_router(router)
app.include_router(router)
start_schedule()

View file

@@ -1,23 +1,21 @@
import logging import logging
import requests
from fastapi import APIRouter, HTTPException, status, Depends import os
from datetime import datetime, date, timedelta
from fastapi import APIRouter, HTTPException, Response, status, Depends
from amarillo.models.Carpool import Region from amarillo.models.Carpool import Region
from amarillo.routers.agencyconf import verify_admin_api_key
from amarillo.services.regions import RegionService from amarillo.services.regions import RegionService
from amarillo.services.oauth2 import get_current_user, verify_permission
from amarillo.models.User import User
from amarillo.utils.container import container from amarillo.utils.container import container
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
from .config import config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
router = APIRouter() router = APIRouter()
@router.post("/export")
async def post_agency_conf(admin_api_key: str = Depends(verify_admin_api_key)):
#import is here to avoid circular import
from amarillo.plugins.gtfs_export.gtfs_generator import generate_gtfs
generate_gtfs()
#TODO: move to amarillo/utils? #TODO: move to amarillo/utils?
def _assert_region_exists(region_id: str) -> Region: def _assert_region_exists(region_id: str) -> Region:
regions: RegionService = container['regions'] regions: RegionService = container['regions']
@@ -31,6 +29,19 @@ def _assert_region_exists(region_id: str) -> Region:
return region return region
# File on disk is from the today
def is_cached_day(path : str):
if not os.path.isfile(path): return False
timestamp = os.path.getmtime(path)
return datetime.fromtimestamp(timestamp).date() == date.today()
# File on disk is from the last minute
def is_cached_1m(path : str):
if not os.path.isfile(path): return False
timestamp = os.path.getmtime(path)
return datetime.now() - datetime.fromtimestamp(timestamp) < timedelta(minutes=1)
@router.get("/region/{region_id}/gtfs", @router.get("/region/{region_id}/gtfs",
summary="Return GTFS Feed for this region", summary="Return GTFS Feed for this region",
@@ -40,9 +51,22 @@ def _assert_region_exists(region_id: str) -> Region:
status.HTTP_404_NOT_FOUND: {"description": "Region not found"}, status.HTTP_404_NOT_FOUND: {"description": "Region not found"},
} }
) )
async def get_file(region_id: str, user: str = Depends(verify_admin_api_key)): async def get_file(region_id: str, requesting_user: User = Depends(get_current_user)):
verify_permission("gtfs", requesting_user)
_assert_region_exists(region_id) _assert_region_exists(region_id)
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfs.zip') file_path = f'data/gtfs/amarillo.{region_id}.gtfs.zip'
if is_cached_day(file_path):
# logger.info("Returning cached response")
return FileResponse(file_path)
# logger.info("Returning new response")
response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs/")
# cache response
if response.status_code == 200:
with open(file_path, "wb") as file:
file.write(response.content)
return Response(content=response.content, media_type="application/zip")
@router.get("/region/{region_id}/gtfs-rt", @router.get("/region/{region_id}/gtfs-rt",
summary="Return GTFS-RT Feed for this region", summary="Return GTFS-RT Feed for this region",
@@ -53,12 +77,26 @@ async def get_file(region_id: str, user: str = Depends(verify_admin_api_key)):
status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."} status.HTTP_400_BAD_REQUEST: {"description": "Bad request, e.g. because format is not supported, i.e. neither protobuf nor json."}
} }
) )
async def get_file(region_id: str, format: str = 'protobuf', user: str = Depends(verify_admin_api_key)): async def get_file(region_id: str, format: str = 'protobuf', requesting_user: User = Depends(get_current_user)):
verify_permission("gtfs", requesting_user)
_assert_region_exists(region_id) _assert_region_exists(region_id)
if format == 'json': if format == 'json':
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.json') file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.json'
elif format == 'protobuf': elif format == 'protobuf':
return FileResponse(f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf') file_path = f'data/gtfs/amarillo.{region_id}.gtfsrt.pbf'
else: else:
message = "Specified format is not supported, i.e. neither protobuf nor json." message = "Specified format is not supported, i.e. neither protobuf nor json."
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
if is_cached_1m(file_path):
# logger.info("Returning cached response")
return FileResponse(file_path)
# logger.info("Returning new response")
response = requests.get(f"{config.generator_url}/region/{region_id}/gtfs-rt/?format={format}")
# cache response
if response.status_code == 200:
with open(file_path, "wb") as file:
file.write(response.content)
return Response(content=response.content)

View file

@@ -1,10 +1,8 @@
[project] [project]
name = "amarillo-gtfs-export" name = "amarillo-gtfs-exporter"
version = "0.0.1" version = "0.0.4"
dependencies = [ dependencies = [
"amarillo", "amarillo"
"amarillo-enhancer",
"schedule==1.2.1",
] ]
[tool.setuptools.packages] [tool.setuptools.packages]