"""
|
|
Database dump resource for the OpenEventDatabase.
|
|
Provides endpoints to list and create database dumps.
|
|
"""
|
|
|
|
import os
|
|
import subprocess
|
|
import datetime
|
|
import falcon
|
|
import psycopg2.extras
|
|
import json
|
|
from pathlib import Path
|
|
from oedb.utils.db import db_connect
|
|
from oedb.utils.serialization import dumps
|
|
from oedb.utils.logging import logger
|
|
|
|
# Directory to store database dumps
|
|
DUMPS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../dumps'))
|
|
|
|
# Ensure the dumps directory exists
|
|
os.makedirs(DUMPS_DIR, exist_ok=True)
|
|
|
|
class DbDumpListResource:
    """
    Resource for listing database dumps.
    Handles the /db/dumps endpoint.
    """

    def on_get(self, req, resp):
        """
        Handle GET requests to the /db/dumps endpoint.
        Lists all available database dumps.

        Args:
            req: The request object.
            resp: The response object.
        """
        logger.info("Processing GET request to /db/dumps")

        try:
            # Get list of dump files
            dump_files = []
            for ext in ['sql', 'geojson']:
                for file_path in Path(DUMPS_DIR).glob(f'*.{ext}'):
                    stat = file_path.stat()
                    dump_files.append({
                        'filename': file_path.name,
                        'path': f'/db/dumps/{file_path.name}',
                        'size': stat.st_size,
                        'created': datetime.datetime.fromtimestamp(stat.st_ctime).isoformat(),
                        'type': ext
                    })

            # Sort by creation time (newest first)
            dump_files.sort(key=lambda x: x['created'], reverse=True)

            resp.text = dumps({'dumps': dump_files})
            resp.status = falcon.HTTP_200
            logger.success("Successfully processed GET request to /db/dumps")
        except Exception as e:
            logger.error(f"Error processing GET request to /db/dumps: {e}")
            resp.status = falcon.HTTP_500
            resp.text = dumps({"error": str(e)})
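
# Example response body for GET /db/dumps (illustrative values only; the real
# listing depends on whatever dump files are present in DUMPS_DIR):
#
#     {"dumps": [{"filename": "oedb_dump_20240101_120000.sql",
#                 "path": "/db/dumps/oedb_dump_20240101_120000.sql",
#                 "size": 1048576,
#                 "created": "2024-01-01T12:00:00",
#                 "type": "sql"}]}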


class DbDumpCreateResource:
    """
    Resource for creating database dumps.
    Handles the /db/dumps/create endpoint.
    """

    def on_post(self, req, resp):
        """
        Handle POST requests to the /db/dumps/create endpoint.
        Creates a new database dump in SQL and GeoJSON formats.

        Args:
            req: The request object.
            resp: The response object.
        """
        logger.info("Processing POST request to /db/dumps/create")

        try:
            # Generate timestamp for filenames
            timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")

            # Create SQL dump
            sql_filename = f"oedb_dump_{timestamp}.sql"
            sql_path = os.path.join(DUMPS_DIR, sql_filename)

            # Get database connection parameters from environment
            dbname = os.getenv("DB_NAME", "oedb")
            host = os.getenv("DB_HOST", "localhost")
            user = os.getenv("DB_USER", "postgres")
            password = os.getenv("POSTGRES_PASSWORD", "")

            # Set PGPASSWORD environment variable for pg_dump
            env = os.environ.copy()
            env["PGPASSWORD"] = password

            # Build the pg_dump command (the pg_dump binary must be available
            # on the PATH of the process running the API)
            pg_dump_cmd = [
                "pg_dump",
                "-h", host,
                "-U", user,
                "-d", dbname,
                "-f", sql_path
            ]

            logger.info(f"Creating SQL dump: {sql_filename}")
            subprocess.run(pg_dump_cmd, env=env, check=True)

            # Create GeoJSON dump
            geojson_filename = f"oedb_dump_{timestamp}.geojson"
            geojson_path = os.path.join(DUMPS_DIR, geojson_filename)

            # Connect to database
            db = db_connect()
            cur = db.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

            # Query all events
            logger.info(f"Creating GeoJSON dump: {geojson_filename}")
            cur.execute("SELECT * FROM events;")
            rows = cur.fetchall()
            # Convert to GeoJSON
            features = []
            for row in rows:
                # Extract geometry; events_where may already be decoded JSON
                # (e.g. from a jsonb column) or a JSON string
                geom = None
                raw_geom = row.get('events_where')
                if isinstance(raw_geom, dict):
                    geom = raw_geom
                elif raw_geom:
                    try:
                        geom = json.loads(raw_geom)
                    except (TypeError, ValueError):
                        pass

                # Time range of the event; fall back to an empty mapping when missing
                when = row.get('events_when') or {}

                # Create feature
                feature = {
                    "type": "Feature",
                    "geometry": geom or {"type": "Point", "coordinates": [0, 0]},
                    "properties": {
                        "id": row.get('events_id'),
                        "what": row.get('events_what'),
                        "label": row.get('events_label'),
                        "when": {
                            "start": when.get('lower'),
                            "stop": when.get('upper')
                        },
                        "tags": row.get('events_tags'),
                        "createdate": row.get('createdate'),
                        "lastupdate": row.get('lastupdate')
                    }
                }
                features.append(feature)

            # Write GeoJSON file (default=str keeps non-JSON-native values such
            # as datetimes serializable)
            with open(geojson_path, 'w') as f:
                json.dump({
                    "type": "FeatureCollection",
                    "features": features
                }, f, default=str)

            # Return information about created dumps
            resp.text = dumps({
                "message": "Database dumps created successfully",
                "dumps": [
                    {
                        "filename": sql_filename,
                        "path": f"/db/dumps/{sql_filename}",
                        "type": "sql",
                        "size": os.path.getsize(sql_path)
                    },
                    {
                        "filename": geojson_filename,
                        "path": f"/db/dumps/{geojson_filename}",
                        "type": "geojson",
                        "size": os.path.getsize(geojson_path)
                    }
                ]
            })
            resp.status = falcon.HTTP_201
            logger.success("Successfully processed POST request to /db/dumps/create")
        except Exception as e:
            logger.error(f"Error processing POST request to /db/dumps/create: {e}")
            resp.status = falcon.HTTP_500
            resp.text = dumps({"error": str(e)})
        finally:
            # Close the cursor and connection if they were opened; check each
            # separately so a failure before the cursor was created does not
            # raise a NameError here
            if 'cur' in locals() and cur:
                cur.close()
            if 'db' in locals() and db:
                db.close()
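
# Example response body for POST /db/dumps/create (illustrative values only;
# filenames and sizes depend on the dump that was actually produced):
#
#     {"message": "Database dumps created successfully",
#      "dumps": [{"filename": "oedb_dump_20240101_120000.sql",
#                 "path": "/db/dumps/oedb_dump_20240101_120000.sql",
#                 "type": "sql",
#                 "size": 1048576},
#                {"filename": "oedb_dump_20240101_120000.geojson",
#                 "path": "/db/dumps/oedb_dump_20240101_120000.geojson",
#                 "type": "geojson",
#                 "size": 524288}]}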

# Create resource instances
db_dump_list = DbDumpListResource()
db_dump_create = DbDumpCreateResource()
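
# Minimal sketch of how these resources could be wired into a Falcon app.
# Illustrative only: the import path below is assumed, and the actual route
# registration lives elsewhere in the project and may differ.
#
#     import falcon
#     from oedb.resources.db_dump import db_dump_list, db_dump_create  # assumed module path
#
#     app = falcon.App()
#     app.add_route("/db/dumps", db_dump_list)
#     app.add_route("/db/dumps/create", db_dump_create)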