diff --git a/README.md b/README.md index f9daf31..133a273 100644 --- a/README.md +++ b/README.md @@ -477,6 +477,45 @@ e.g. show a specific survey patch. ] ``` +### Using a Database For Configuration + +In some cases, you may wish to use a database instead of a configuration file for your +main configuration provider. This would be in cases where, for example, you need to +periodically update your configuration without restarting the server, such as a +production service. We provide integration with SQLite to manage your tilemaker +configuration, and a command-line tool to view the contents and delete items from the +database. + +To use the database, you simply need to set the configuration file location to +one including the `sqlite://` prefix. For example: +``` +export TILEMAKER_CONFIG_PATH="sqlite:///database.db" +``` +With this enabled, the CLI tool `tilemaker-db` will be available. You can then +ingest any existing config file: +``` +tilemaker-db populate config.json +``` +If you have a partially populated database, `tilemaker-db populate` upserts +into it, adding only the new information and avoiding duplicates. You can view the contents +of your database using the `tilemaker-db list` command, which supports +`group`, `map`, `band`, `layer`, `box`, `source_group`, and `source`: +``` +tilemaker-db list band +>>> band-d7fc6c Auto-Populated +>>> band-bffafd Auto-Populated +>>> band-29317f Auto-Populated +>>> band-893cba Auto-Populated +>>> band-e36ce0 Auto-Populated +>>> band-dd8b6f Auto-Populated +``` +You can delete each of these items too, by their unique identifier (the first +column): +``` +tilemaker-db delete band band-d7fc6c +``` +Any and all changes will be reflected on refresh of the map viewer client. 
+ Map Viewer ---------- diff --git a/pyproject.toml b/pyproject.toml index 0d64172..6184b9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ dependencies = [ [project.scripts] tilemaker = "tilemaker.client.cli:main" +tilemaker-db = "tilemaker.metadata.database:main" [tool.ruff.lint] extend-select = ["I"] diff --git a/tilemaker/metadata/database.py b/tilemaker/metadata/database.py new file mode 100644 index 0000000..3394706 --- /dev/null +++ b/tilemaker/metadata/database.py @@ -0,0 +1,635 @@ +""" +A database-backed implementation of the DataConfiguration object. Used in production +when you need to be able to dynamically update the available maps. Comes along +with tools to populate the database from a static configuration file and delete +entries as needed. +""" + +import itertools +from typing import Iterable + +import structlog +from sqlalchemy import create_engine +from sqlalchemy.orm import Session, sessionmaker + +from .boxes import Box +from .core import DataConfiguration +from .definitions import ( + Band, + Layer, + Map, + MapGroup, +) +from .fits import FITSLayerProvider +from .orm import ( + BandORM, + Base, + BoxORM, + LayerORM, + MapGroupORM, + MapORM, + SourceGroupORM, + SourceORM, +) +from .sources import Source, SourceGroup + + +class DatabaseDataConfiguration: + """ + A database-backed implementation of the DataConfiguration object using SQLAlchemy. + """ + + def __init__(self, database_url: str): + """ + Initialize the database configuration. 
+ + Parameters + ---------- + database_url : str + SQLAlchemy database URL (e.g., 'sqlite:///config.db', 'postgresql://user:password@localhost/dbname') + """ + self.engine = create_engine(database_url) + self.session_maker = sessionmaker(bind=self.engine) + self.log = structlog.get_logger() + + def create_tables(self): + """Create all tables in the database.""" + Base.metadata.create_all(self.engine) + + @property + def map_groups(self) -> list[MapGroup]: + """Retrieve all map groups from the database.""" + with self.session_maker() as session: + orm_groups = session.query(MapGroupORM).all() + return [ + self._orm_to_map_group(session, orm_group) for orm_group in orm_groups + ] + + @property + def boxes(self) -> list[Box]: + """Retrieve all boxes from the database.""" + with self.session_maker() as session: + orm_boxes = session.query(BoxORM).all() + return [self._orm_to_box(orm_box) for orm_box in orm_boxes] + + @property + def source_groups(self) -> list[SourceGroup]: + """Retrieve all source groups from the database.""" + with self.session_maker() as session: + orm_groups = session.query(SourceGroupORM).all() + return [ + self._orm_to_source_group(session, orm_group) + for orm_group in orm_groups + ] + + @property + def layers(self) -> Iterable[Layer]: + """Retrieve all layers from the database.""" + return itertools.chain.from_iterable( + band.layers + for group in self.map_groups + for map in group.maps + for band in map.bands + ) + + def layer(self, layer_id: str) -> Layer | None: + """Retrieve a specific layer by its ID.""" + with self.session_maker() as session: + orm_layer = session.query(LayerORM).filter_by(layer_id=layer_id).first() + if orm_layer is None: + return None + return self._orm_to_layer(session, orm_layer) + + def source_group(self, source_group_id: str) -> SourceGroup | None: + """Retrieve a specific source group by its ID.""" + with self.session_maker() as session: + orm_group = ( + session.query(SourceGroupORM) + 
.filter_by(source_group_id=source_group_id) + .first() + ) + if orm_group is None: + return None + return self._orm_to_source_group(session, orm_group) + + # Deletion methods + def delete_layer(self, layer_id: str) -> bool: + """Delete a layer by its ID.""" + with self.session_maker() as session: + orm_layer = session.query(LayerORM).filter_by(layer_id=layer_id).first() + if orm_layer is None: + return False + session.delete(orm_layer) + session.commit() + return True + + def delete_band(self, band_id: str, map_id: str | None = None) -> bool: + """Delete a band by band_id (optionally scoping by map_id to disambiguate).""" + with self.session_maker() as session: + query = session.query(BandORM).filter_by(band_id=band_id) + if map_id is not None: + query = query.join(MapORM).filter(MapORM.map_id == map_id) + orm_band = query.first() + if orm_band is None: + return False + session.delete(orm_band) + session.commit() + return True + + def delete_map(self, map_id: str) -> bool: + """Delete a map by its map_id.""" + with self.session_maker() as session: + orm_map = session.query(MapORM).filter_by(map_id=map_id).first() + if orm_map is None: + return False + session.delete(orm_map) + session.commit() + return True + + def delete_map_group(self, name: str) -> bool: + """Delete a map group by its name.""" + with self.session_maker() as session: + orm_group = session.query(MapGroupORM).filter_by(name=name).first() + if orm_group is None: + return False + session.delete(orm_group) + session.commit() + return True + + # Conversion methods + def _orm_to_box(self, orm_box: BoxORM) -> Box: + """Convert ORM Box to Pydantic Box.""" + return Box( + name=orm_box.name, + description=orm_box.description, + top_left_ra=orm_box.top_left_ra, + top_left_dec=orm_box.top_left_dec, + bottom_right_ra=orm_box.bottom_right_ra, + bottom_right_dec=orm_box.bottom_right_dec, + grant=orm_box.grant, + ) + + def _orm_to_source(self, orm_source: SourceORM) -> Source: + """Convert ORM Source to Pydantic 
Source.""" + return Source( + name=orm_source.name, + ra=orm_source.ra, + dec=orm_source.dec, + extra=orm_source.extra, + ) + + def _orm_to_source_group( + self, session: Session, orm_group: SourceGroupORM + ) -> SourceGroup: + """Convert ORM SourceGroup to Pydantic SourceGroup.""" + sources = [self._orm_to_source(src) for src in orm_group.sources] + return SourceGroup( + source_group_id=orm_group.source_group_id, + name=orm_group.name, + description=orm_group.description, + grant=orm_group.grant, + sources=sources, + ) + + def _orm_to_layer(self, session: Session, orm_layer: LayerORM) -> Layer: + """Convert ORM Layer to Pydantic Layer.""" + from pydantic import TypeAdapter + + # Deserialize provider from JSON using Pydantic + provider_adapter = TypeAdapter(FITSLayerProvider) + provider = provider_adapter.validate_python(orm_layer.provider) + + # Convert vmin/vmax from string storage to proper type + vmin = orm_layer.vmin + if vmin is not None and vmin != "auto": + vmin = float(vmin) + + vmax = orm_layer.vmax + if vmax is not None and vmax != "auto": + vmax = float(vmax) + + return Layer( + layer_id=orm_layer.layer_id, + name=orm_layer.name, + description=orm_layer.description, + provider=provider, + bounding_left=orm_layer.bounding_left, + bounding_right=orm_layer.bounding_right, + bounding_top=orm_layer.bounding_top, + bounding_bottom=orm_layer.bounding_bottom, + quantity=orm_layer.quantity, + units=orm_layer.units, + number_of_levels=orm_layer.number_of_levels, + tile_size=orm_layer.tile_size, + vmin=vmin, + vmax=vmax, + cmap=orm_layer.cmap, + grant=orm_layer.grant, + ) + + def _orm_to_band(self, session: Session, orm_band: BandORM) -> Band: + """Convert ORM Band to Pydantic Band.""" + layers = [self._orm_to_layer(session, layer) for layer in orm_band.layers] + return Band( + band_id=orm_band.band_id, + name=orm_band.name, + description=orm_band.description, + layers=layers, + grant=orm_band.grant, + ) + + def _orm_to_map(self, session: Session, orm_map: MapORM) 
-> Map: + """Convert ORM Map to Pydantic Map.""" + bands = [self._orm_to_band(session, band) for band in orm_map.bands] + return Map( + map_id=orm_map.map_id, + name=orm_map.name, + description=orm_map.description, + bands=bands, + grant=orm_map.grant, + ) + + def _orm_to_map_group(self, session: Session, orm_group: MapGroupORM) -> MapGroup: + """Convert ORM MapGroup to Pydantic MapGroup.""" + maps = [self._orm_to_map(session, map) for map in orm_group.maps] + return MapGroup( + name=orm_group.name, + description=orm_group.description, + maps=maps, + grant=orm_group.grant, + ) + + def populate_from_config(self, config: "DataConfiguration") -> None: + """ + Populate the database from a pre-existing DataConfiguration object. + + Parameters + ---------- + config : DataConfiguration + A DataConfiguration object (from core.py) containing map groups, boxes, and source groups. + """ + from pydantic import TypeAdapter + + with self.session_maker() as session: + # Populate map groups, maps, bands, and layers without duplicating existing rows + for map_group in config.map_groups: + orm_group = ( + session.query(MapGroupORM).filter_by(name=map_group.name).first() + ) + if orm_group is None: + orm_group = MapGroupORM( + name=map_group.name, + description=map_group.description, + grant=map_group.grant, + ) + session.add(orm_group) + session.flush() + else: + orm_group.description = map_group.description + orm_group.grant = map_group.grant + session.flush() + + for map in map_group.maps: + orm_map = session.query(MapORM).filter_by(map_id=map.map_id).first() + if orm_map is None: + orm_map = MapORM( + map_id=map.map_id, + name=map.name, + description=map.description, + grant=map.grant, + map_group_id=orm_group.id, + ) + session.add(orm_map) + session.flush() + else: + orm_map.name = map.name + orm_map.description = map.description + orm_map.grant = map.grant + orm_map.map_group_id = orm_group.id + session.flush() + + for band in map.bands: + orm_band = ( + session.query(BandORM) 
+ .filter( + BandORM.band_id == band.band_id, + BandORM.map_id == orm_map.id, + ) + .first() + ) + + if orm_band is None: + orm_band = BandORM( + band_id=band.band_id, + name=band.name, + description=band.description, + grant=band.grant, + map_id=orm_map.id, + ) + session.add(orm_band) + session.flush() + else: + orm_band.name = band.name + orm_band.description = band.description + orm_band.grant = band.grant + orm_band.map_id = orm_map.id + session.flush() + + for layer in band.layers: + provider_adapter = TypeAdapter(type(layer.provider)) + provider_dict = provider_adapter.dump_python( + layer.provider, mode="json" + ) + + vmin_str = None if layer.vmin is None else str(layer.vmin) + vmax_str = None if layer.vmax is None else str(layer.vmax) + + orm_layer = ( + session.query(LayerORM) + .filter_by(layer_id=layer.layer_id) + .first() + ) + + if orm_layer is None: + orm_layer = LayerORM( + layer_id=layer.layer_id, + name=layer.name, + description=layer.description, + grant=layer.grant, + band_id=orm_band.id, + quantity=layer.quantity, + units=layer.units, + number_of_levels=layer.number_of_levels, + tile_size=layer.tile_size, + vmin=vmin_str, + vmax=vmax_str, + cmap=layer.cmap, + provider=provider_dict, + bounding_left=layer.bounding_left, + bounding_right=layer.bounding_right, + bounding_top=layer.bounding_top, + bounding_bottom=layer.bounding_bottom, + ) + session.add(orm_layer) + else: + orm_layer.name = layer.name + orm_layer.description = layer.description + orm_layer.grant = layer.grant + orm_layer.band_id = orm_band.id + orm_layer.quantity = layer.quantity + orm_layer.units = layer.units + orm_layer.number_of_levels = layer.number_of_levels + orm_layer.tile_size = layer.tile_size + orm_layer.vmin = vmin_str + orm_layer.vmax = vmax_str + orm_layer.cmap = layer.cmap + orm_layer.provider = provider_dict + orm_layer.bounding_left = layer.bounding_left + orm_layer.bounding_right = layer.bounding_right + orm_layer.bounding_top = layer.bounding_top + 
orm_layer.bounding_bottom = layer.bounding_bottom + + # Populate boxes without duplicates (keyed by name) + for box in config.boxes: + orm_box = session.query(BoxORM).filter_by(name=box.name).first() + if orm_box is None: + orm_box = BoxORM( + name=box.name, + description=box.description, + top_left_ra=box.top_left_ra, + top_left_dec=box.top_left_dec, + bottom_right_ra=box.bottom_right_ra, + bottom_right_dec=box.bottom_right_dec, + grant=box.grant, + ) + session.add(orm_box) + else: + orm_box.description = box.description + orm_box.top_left_ra = box.top_left_ra + orm_box.top_left_dec = box.top_left_dec + orm_box.bottom_right_ra = box.bottom_right_ra + orm_box.bottom_right_dec = box.bottom_right_dec + orm_box.grant = box.grant + + # Populate source groups and sources without duplicates + for source_group in config.source_groups: + orm_source_group = ( + session.query(SourceGroupORM) + .filter_by(source_group_id=source_group.source_group_id) + .first() + ) + + if orm_source_group is None: + orm_source_group = SourceGroupORM( + source_group_id=source_group.source_group_id, + name=source_group.name, + description=source_group.description, + grant=source_group.grant, + ) + session.add(orm_source_group) + session.flush() + else: + orm_source_group.name = source_group.name + orm_source_group.description = source_group.description + orm_source_group.grant = source_group.grant + session.flush() + + # Replace sources for this group to avoid duplication + session.query(SourceORM).filter_by( + source_group_id=orm_source_group.id + ).delete(synchronize_session=False) + + if source_group.sources: + for source in source_group.sources: + orm_source = SourceORM( + name=source.name, + ra=source.ra, + dec=source.dec, + extra=source.extra, + source_group_id=orm_source_group.id, + ) + session.add(orm_source) + + session.commit() + self.log.info("database.populated_from_config") + + +def main(): + """ + Run the CLI wrapper to help with managing the databases. 
There are a number of + commands: + + tilemaker-db delete {group,map,band,layer,source_group} <identifier> + Delete an entry from the database by its ID. + + tilemaker-db list {group,map,band,layer,box,source_group,source} + List all entries of a given type in the database. + + tilemaker-db populate <config> + Populate the database from a static configuration file. + + tilemaker-db details + Show summary details about the database contents. + + Note that the database configuration details are as specified in your + tilemaker central configuration. + """ + import argparse as ap + from pathlib import Path + + from tilemaker.settings import settings + + database_configuration = settings.parse_config() + + if not isinstance(database_configuration, DatabaseDataConfiguration): + print( + "This CLI only works with database-backed configurations loaded via settings." + ) + return + + parser = ap.ArgumentParser(description="Tilemaker database management CLI") + + subparsers = parser.add_subparsers(dest="command", required=True) + + populate_parser = subparsers.add_parser( + "populate", help="Populate the database from a static config JSON file" + ) + populate_parser.add_argument( + "config", + help="Path to the JSON configuration file (same schema as static config)", + ) + + list_parser = subparsers.add_parser("list", help="List entries of a given type") + list_parser.add_argument( + "entity", + choices=["group", "map", "band", "layer", "box", "source_group", "source"], + ) + + bands_parser = subparsers.add_parser( + "bands", help="List all bands for a specific map" + ) + bands_parser.add_argument("map_id", help="The map ID") + + layers_parser = subparsers.add_parser( + "layers", help="List all layers for a specific map" + ) + layers_parser.add_argument("map_id", help="The map ID") + + delete_parser = subparsers.add_parser( + "delete", help="Delete an entry of a given type by identifier" + ) + delete_parser.add_argument( + "entity", + choices=["group", "map", "band", "layer", 
"source_group"], + help="Entity type to delete. Boxes/sources require manual handling.", + ) + delete_parser.add_argument("identifier", help="Identifier (e.g., layer_id, map_id)") + delete_parser.add_argument( + "--map-id", + help="Map ID to disambiguate band deletes (optional)", + ) + + subparsers.add_parser( + "details", help="Show summary details about the database contents" + ) + + args = parser.parse_args() + + if args.command == "populate": + database_configuration.create_tables() + # Load static config file as DataConfiguration for ingestion + cfg_json = Path(args.config).read_text() + ingest_cfg = DataConfiguration.model_validate_json(cfg_json) + database_configuration.populate_from_config(ingest_cfg) + print("Database populated from config") + return + + if args.command == "list": + if args.entity == "group": + for g in database_configuration.map_groups: + print(g.name) + elif args.entity == "map": + for g in database_configuration.map_groups: + for m in g.maps: + print(m.map_id, m.name) + elif args.entity == "band": + for g in database_configuration.map_groups: + for m in g.maps: + for b in m.bands: + print(b.band_id, b.name) + elif args.entity == "layer": + for layer in database_configuration.layers: + print(layer.layer_id, layer.name) + elif args.entity == "box": + for b in database_configuration.boxes: + print(b.name) + elif args.entity == "source_group": + for sg in database_configuration.source_groups: + print(sg.source_group_id) + elif args.entity == "source": + for sg in database_configuration.source_groups: + if sg.sources: + for s in sg.sources: + print(f"{sg.source_group_id}:{s.name}") + return + + if args.command == "bands": + found = False + for g in database_configuration.map_groups: + for m in g.maps: + if m.map_id == args.map_id: + found = True + for b in m.bands: + print(b.band_id, b.name) + if not found: + print(f"Map {args.map_id} not found") + return + + if args.command == "layers": + found = False + for g in 
database_configuration.map_groups: + for m in g.maps: + if m.map_id == args.map_id: + found = True + for b in m.bands: + for layer in b.layers: + print(layer.layer_id, layer.name) + if not found: + print(f"Map {args.map_id} not found") + + if args.command == "delete": + ok = False + if args.entity == "layer": + ok = database_configuration.delete_layer(args.identifier) + elif args.entity == "band": + ok = database_configuration.delete_band(args.identifier, map_id=args.map_id) + elif args.entity == "map": + ok = database_configuration.delete_map(args.identifier) + elif args.entity == "group": + ok = database_configuration.delete_map_group(args.identifier) + elif args.entity == "source_group": + with database_configuration.session_maker() as session: + orm_sg = ( + session.query(SourceGroupORM) + .filter_by(source_group_id=args.identifier) + .first() + ) + if orm_sg: + session.delete(orm_sg) + session.commit() + ok = True + if ok: + print("Deleted", args.entity, args.identifier) + else: + print(args.entity, args.identifier, "not found") + return + + if args.command == "details": + print(f"Map groups: {len(database_configuration.map_groups)}") + print(f"Maps: {sum(len(g.maps) for g in database_configuration.map_groups)}") + print(f"Layers: {sum(1 for _ in database_configuration.layers)}") + print(f"Boxes: {len(database_configuration.boxes)}") + sg = database_configuration.source_groups + print(f"Source groups: {len(sg)}") + print(f"Sources: {sum(len(x.sources or []) for x in sg)}") + return diff --git a/tilemaker/metadata/definitions.py b/tilemaker/metadata/definitions.py index d290623..8c2bd71 100644 --- a/tilemaker/metadata/definitions.py +++ b/tilemaker/metadata/definitions.py @@ -67,15 +67,12 @@ ``` """ -import math -from pathlib import Path from typing import Literal -from astropy import units -from astropy.io import fits -from astropy.wcs import WCS from pydantic import BaseModel +from .fits import FITSLayerProvider + class AuthenticatedModel(BaseModel): grant: 
str | None = None @@ -84,96 +81,6 @@ def auth(self, grants: set[str]): return self.grant is None or self.grant in grants -class LayerProvider(BaseModel): - provider_type: Literal["fits"] = "fits" - - def get_bbox(self) -> dict[str, float]: - return - - -class FITSLayerProvider(LayerProvider): - provider_type: Literal["fits"] = "fits" - filename: Path - hdu: int = 0 - index: int | None = None - - def get_bbox(self) -> dict[str, float]: - with fits.open(self.filename) as handle: - data = handle[self.hdu] - wcs = WCS(header=data.header) - - top_right = wcs.array_index_to_world(*[0] * data.header.get("NAXIS", 2)) - bottom_left = wcs.array_index_to_world(*[x - 1 for x in data.data.shape]) - - def sanitize(x): - return ( - x[0].ra - if x[0].ra < 180.0 * units.deg - else x[0].ra - 360.0 * units.deg - ), ( - x[0].dec - if x[0].dec < 90.0 * units.deg - else x[0].dec - 180.0 * units.deg - ) - - def sanitize_nonscalar(x): - return x.ra if x.ra < 180.0 * units.deg else x.ra - 360.0 * units.deg, ( - x.dec if x.dec < 90.0 * units.deg else x.dec - 180.0 * units.deg - ) - - try: - tr = sanitize(top_right) - bl = sanitize(bottom_left) - except TypeError: - tr = sanitize_nonscalar(top_right) - bl = sanitize_nonscalar(bottom_left) - - return { - "bounding_left": bl[0].value, - "bounding_right": tr[0].value, - "bounding_top": tr[1].value, - "bounding_bottom": bl[1].value, - } - - def calculate_tile_size(self) -> tuple[int, int]: - # Need to figure out how big the whole 'map' is, i.e. moving it up - # so that it fills the whole space. - wcs = self.get_wcs() - - scale = wcs.proj_plane_pixel_scales() - scale_x_deg = scale[0] - scale_y_deg = scale[1] - - # The full sky spans 360 deg in RA, 180 deg in Dec - map_size_x = int(math.floor(360 * units.deg / scale_x_deg)) - map_size_y = int(math.floor(180 * units.deg / scale_y_deg)) - - max_size = max(map_size_x, map_size_y) - - # See if 256 fits. 
- if (map_size_x % 256 == 0) and (map_size_y % 256 == 0): - tile_size = 256 - number_of_levels = int(math.log2(max_size // 256)) - return tile_size, number_of_levels - - # Oh no, remove all the powers of two until - # we get an odd number. - this_tile_size = map_size_y - - # Also don't make it too small. - while this_tile_size % 2 == 0 and this_tile_size > 512: - this_tile_size = this_tile_size // 2 - - number_of_levels = int(math.log2(max_size // this_tile_size)) - tile_size = this_tile_size - - return tile_size, number_of_levels - - def get_wcs(self) -> WCS: - with fits.open(self.filename) as h: - return WCS(h[self.hdu].header) - - class Layer(AuthenticatedModel): layer_id: str name: str diff --git a/tilemaker/metadata/fits.py b/tilemaker/metadata/fits.py new file mode 100644 index 0000000..d731862 --- /dev/null +++ b/tilemaker/metadata/fits.py @@ -0,0 +1,110 @@ +""" +FITS-based layer provider implementation. +""" + +import math +from pathlib import Path +from typing import Literal + +from astropy import units +from astropy.io import fits +from astropy.wcs import WCS +from pydantic import BaseModel + + +class LayerProvider(BaseModel): + """Base class for layer providers.""" + + provider_type: Literal["fits"] = "fits" + + def get_bbox(self) -> dict[str, float]: + """Get the bounding box of the provider.""" + return + + +class FITSLayerProvider(LayerProvider): + """FITS file-based layer provider.""" + + provider_type: Literal["fits"] = "fits" + filename: Path + hdu: int = 0 + index: int | None = None + + def get_bbox(self) -> dict[str, float]: + """Extract bounding box from FITS file header.""" + with fits.open(self.filename) as handle: + data = handle[self.hdu] + wcs = WCS(header=data.header) + + top_right = wcs.array_index_to_world(*[0] * data.header.get("NAXIS", 2)) + bottom_left = wcs.array_index_to_world(*[x - 1 for x in data.data.shape]) + + def sanitize(x): + return ( + x[0].ra + if x[0].ra < 180.0 * units.deg + else x[0].ra - 360.0 * units.deg + ), ( + 
x[0].dec + if x[0].dec < 90.0 * units.deg + else x[0].dec - 180.0 * units.deg + ) + + def sanitize_nonscalar(x): + return x.ra if x.ra < 180.0 * units.deg else x.ra - 360.0 * units.deg, ( + x.dec if x.dec < 90.0 * units.deg else x.dec - 180.0 * units.deg + ) + + try: + tr = sanitize(top_right) + bl = sanitize(bottom_left) + except TypeError: + tr = sanitize_nonscalar(top_right) + bl = sanitize_nonscalar(bottom_left) + + return { + "bounding_left": bl[0].value, + "bounding_right": tr[0].value, + "bounding_top": tr[1].value, + "bounding_bottom": bl[1].value, + } + + def calculate_tile_size(self) -> tuple[int, int]: + """Calculate appropriate tile size based on FITS file properties.""" + # Need to figure out how big the whole 'map' is, i.e. moving it up + # so that it fills the whole space. + wcs = self.get_wcs() + + scale = wcs.proj_plane_pixel_scales() + scale_x_deg = scale[0] + scale_y_deg = scale[1] + + # The full sky spans 360 deg in RA, 180 deg in Dec + map_size_x = int(math.floor(360 * units.deg / scale_x_deg)) + map_size_y = int(math.floor(180 * units.deg / scale_y_deg)) + + max_size = max(map_size_x, map_size_y) + + # See if 256 fits. + if (map_size_x % 256 == 0) and (map_size_y % 256 == 0): + tile_size = 256 + number_of_levels = int(math.log2(max_size // 256)) + return tile_size, number_of_levels + + # Oh no, remove all the powers of two until + # we get an odd number. + this_tile_size = map_size_y + + # Also don't make it too small. 
+ while this_tile_size % 2 == 0 and this_tile_size > 512: + this_tile_size = this_tile_size // 2 + + number_of_levels = int(math.log2(max_size // this_tile_size)) + tile_size = this_tile_size + + return tile_size, number_of_levels + + def get_wcs(self) -> WCS: + """Get the WCS object from the FITS file.""" + with fits.open(self.filename) as h: + return WCS(h[self.hdu].header) diff --git a/tilemaker/metadata/orm.py b/tilemaker/metadata/orm.py new file mode 100644 index 0000000..f84b6f1 --- /dev/null +++ b/tilemaker/metadata/orm.py @@ -0,0 +1,152 @@ +""" +SQLAlchemy ORM models for the metadata database. +""" + +from sqlalchemy import ( + JSON, + Column, + Float, + ForeignKey, + Integer, + String, +) +from sqlalchemy.orm import DeclarativeBase, relationship + + +class Base(DeclarativeBase): + pass + + +class MapGroupORM(Base): + __tablename__ = "map_groups" + + id = Column(Integer, primary_key=True) + name = Column(String, nullable=False) + description = Column(String) + grant = Column(String) + + maps = relationship( + "MapORM", + back_populates="map_group", + cascade="all, delete-orphan", + passive_deletes=True, + ) + + +class MapORM(Base): + __tablename__ = "maps" + + id = Column(Integer, primary_key=True) + map_id = Column(String, unique=True, nullable=False) + name = Column(String, nullable=False) + description = Column(String) + grant = Column(String) + map_group_id = Column( + Integer, ForeignKey("map_groups.id", ondelete="CASCADE"), nullable=False + ) + + map_group = relationship("MapGroupORM", back_populates="maps") + bands = relationship( + "BandORM", + back_populates="map", + cascade="all, delete-orphan", + passive_deletes=True, + ) + + +class BandORM(Base): + __tablename__ = "bands" + + id = Column(Integer, primary_key=True) + band_id = Column(String, nullable=False) + name = Column(String, nullable=False) + description = Column(String) + grant = Column(String) + map_id = Column(Integer, ForeignKey("maps.id", ondelete="CASCADE"), nullable=False) + + map = 
relationship("MapORM", back_populates="bands") + layers = relationship( + "LayerORM", + back_populates="band", + cascade="all, delete-orphan", + passive_deletes=True, + ) + + +class LayerORM(Base): + __tablename__ = "layers" + + id = Column(Integer, primary_key=True) + layer_id = Column(String, unique=True, nullable=False) + name = Column(String, nullable=False) + description = Column(String) + grant = Column(String) + band_id = Column( + Integer, ForeignKey("bands.id", ondelete="CASCADE"), nullable=False + ) + + quantity = Column(String) + units = Column(String) + + number_of_levels = Column(Integer) + tile_size = Column(Integer) + + vmin = Column(String) # Can be float or 'auto' + vmax = Column(String) # Can be float or 'auto' + cmap = Column(String) + + # Provider information stored as JSON + provider = Column(JSON, nullable=False) + + # Bounding box + bounding_left = Column(Float) + bounding_right = Column(Float) + bounding_top = Column(Float) + bounding_bottom = Column(Float) + + band = relationship("BandORM", back_populates="layers") + + +class BoxORM(Base): + __tablename__ = "boxes" + + id = Column(Integer, primary_key=True) + name = Column(String, nullable=False) + description = Column(String) + top_left_ra = Column(Float, nullable=False) + top_left_dec = Column(Float, nullable=False) + bottom_right_ra = Column(Float, nullable=False) + bottom_right_dec = Column(Float, nullable=False) + grant = Column(String) + + +class SourceGroupORM(Base): + __tablename__ = "source_groups" + + id = Column(Integer, primary_key=True) + source_group_id = Column(String, unique=True, nullable=False) + name = Column(String, nullable=False) + description = Column(String) + grant = Column(String) + + sources = relationship( + "SourceORM", + back_populates="source_group", + cascade="all, delete-orphan", + passive_deletes=True, + ) + + +class SourceORM(Base): + __tablename__ = "sources" + + id = Column(Integer, primary_key=True) + name = Column(String) + ra = Column(Float, 
nullable=False) + dec = Column(Float, nullable=False) + extra = Column(JSON) + source_group_id = Column( + Integer, ForeignKey("source_groups.id", ondelete="CASCADE"), nullable=False + ) + + source_group = relationship("SourceGroupORM", back_populates="sources") diff --git a/tilemaker/settings.py b/tilemaker/settings.py index 55e84e2..8526dc6 100644 --- a/tilemaker/settings.py +++ b/tilemaker/settings.py @@ -10,7 +10,7 @@ class Settings(BaseSettings): - config_path: Path = "config.json" + config_path: str | Path = "config.json" origins: list[str] | None = ["*"] add_cors: bool = True @@ -150,8 +150,12 @@ def setup_app(self, app: FastAPI): def parse_config(self): from tilemaker.metadata.core import parse_config + from tilemaker.metadata.database import DatabaseDataConfiguration - return parse_config(self.config_path) + if "sqlite://" in str(self.config_path): + return DatabaseDataConfiguration(database_url=self.config_path) + + return parse_config(Path(self.config_path)) settings = Settings()