diff --git a/.github/workflows/linting_and_testing.yml b/.github/workflows/linting_and_testing.yml
index dc2ed2fe..9570a3bb 100644
--- a/.github/workflows/linting_and_testing.yml
+++ b/.github/workflows/linting_and_testing.yml
@@ -1,7 +1,20 @@
 name: Linting and Testing
 
 on:
-  [push]
+  push:
+    branches:
+      - master
+    paths:
+      - '**.py'
+      - .github/workflows/linting_and_testing.yml
+
+  pull_request:
+    branches:
+      - master
+    paths:
+      - '**.py'
+      - .github/workflows/linting_and_testing.yml
+  workflow_dispatch:
 
 jobs:
   linting:
@@ -21,29 +34,42 @@ jobs:
 
   testing:
-    name: Testing
-    runs-on: ubuntu-24.04
+    name: Testing${{ matrix.os }} python ${{ matrix.python-version }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ${{ fromJSON(vars.BUILD_OS)}}
+        python-version: ${{ fromJSON(vars.PYTHON_VERSIONS)}}
+
     steps:
       - uses: actions/checkout@v4
 
-      - name: Install GDAL
-        run: |
-          sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable
-          sudo apt-get update
-          sudo apt-get install -y libgdal-dev gdal-bin
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+          conda-remove-defaults: "true"
 
-      - name: Install dependencies
+
+      - name: Install dependencies for windows python 3.10
+        if: ${{ matrix.os == 'windows-latest' && matrix.python-version == '3.10' }}
         run: |
-          conda update -n base -c defaults conda -y
-          conda install -n base conda-libmamba-solver -c conda-forge -y
-          conda install -c conda-forge gdal -y
-          conda install -c conda-forge -c loop3d --file dependencies.txt -y
-          conda install pytest -y
+          conda run -n test conda info
+          conda run -n test conda install -c loop3d -c conda-forge "gdal=3.4.3" python=${{ matrix.python-version }} -y
+          conda run -n test conda install -c loop3d -c conda-forge --file dependencies.txt python=${{ matrix.python-version }} -y
+          conda run -n test conda install pytest python=${{ matrix.python-version }} -y
+
+      - name: Install dependencies for other environments
+        if: ${{ matrix.os != 'windows-latest' || matrix.python-version != '3.10' }}
+        run: |
+          conda run -n test conda info
+          conda run -n test conda install -c loop3d -c conda-forge gdal python=${{ matrix.python-version }} -y
+          conda run -n test conda install -c loop3d -c conda-forge --file dependencies.txt python=${{ matrix.python-version }} -y
+          conda run -n test conda install pytest python=${{ matrix.python-version }} -y
+
       - name: Install map2loop
         run: |
-          python -m pip install .
+          conda run -n test python -m pip install .
 
       - name: Run tests
         run: |
-          pytest
-
+          conda run -n test pytest
\ No newline at end of file
diff --git a/dependencies.txt b/dependencies.txt
index 7a1d95e7..76f0f574 100644
--- a/dependencies.txt
+++ b/dependencies.txt
@@ -1,10 +1,9 @@
 numpy
 scipy
 geopandas
-shapely
+shapely>=2
 networkx
 owslib
-map2model
 loopprojectfile==0.2.2
 beartype
 pytest
diff --git a/map2loop/__init__.py b/map2loop/__init__.py
index d7ccac11..8723f4ef 100644
--- a/map2loop/__init__.py
+++ b/map2loop/__init__.py
@@ -30,7 +30,7 @@ class DependencyChecker:
 
     def __init__(self, package_name, dependency_file="dependencies.txt"):
         self.package_name = package_name
-        self.dependency_file = pathlib.Path(__file__).parent / dependency_file
+        self.dependency_file = pathlib.Path(__file__).parent.parent / dependency_file
         self.required_version = self.get_required_version()
         self.installed_version = self.get_installed_version()
 
@@ -93,7 +93,7 @@ def check_version(self):
 
 
 def check_all_dependencies(dependency_file="dependencies.txt"):
-    dependencies_path = pathlib.Path(__file__).parent / dependency_file
+    dependencies_path = pathlib.Path(__file__).parent.parent / dependency_file
     try:
         with dependencies_path.open("r") as file:
             for line in file:
@@ -103,6 +103,8 @@ def check_all_dependencies(dependency_file="dependencies.txt"):
             if line:
                 if "==" in line:
                     package_name, _ = line.split("==")
+                elif ">=" in line:
+                    package_name, _ = line.split(">=")
                 else:
                     package_name = line
 
diff --git a/map2loop/_datasets/geodata_files/load_map2loop_data.py b/map2loop/_datasets/geodata_files/load_map2loop_data.py
index d625af9b..d1c8be1f 100644
--- a/map2loop/_datasets/geodata_files/load_map2loop_data.py
+++ b/map2loop/_datasets/geodata_files/load_map2loop_data.py
@@ -1,8 +1,12 @@
 import geopandas
-from importlib.resources import files
+import map2loop
+import pathlib
 from osgeo import gdal
 
 gdal.UseExceptions()
-
+def map2loop_dir(folder)-> pathlib.Path:
+    path = pathlib.Path(map2loop.__file__).parent
+    path = path / "_datasets"/"geodata_files"/f'{folder}'
+    return path
 def load_hamersley_geology():
     """
     Loads Hamersley geology data from a shapefile
@@ -14,8 +18,9 @@ def load_hamersley_geology():
     Returns:
         geopandas.GeoDataFrame: The geology data
     """
-    stream = files("map2loop._datasets.geodata_files.hamersley").joinpath("geology.geojson")
-    return geopandas.read_file(stream)
+
+    path = map2loop_dir('hamersley') / "geology.geojson"
+    return geopandas.read_file(str(path))
 
 
 def load_hamersley_structure():
@@ -30,8 +35,8 @@ def load_hamersley_structure():
         geopandas.GeoDataFrame: The structure data
     """
 
-    path = files("map2loop._datasets.geodata_files.hamersley").joinpath("structure.geojson")
-    return geopandas.read_file(path)
+    path = map2loop_dir('hamersley') / "structure.geojson"
+    return geopandas.read_file(str(path))
 
 
 def load_hamersley_dtm():
@@ -41,5 +46,5 @@ def load_hamersley_dtm():
     Returns:
         gdal.Dataset: The DTM data
     """
-    path = files("map2loop._datasets.geodata_files.hamersley").joinpath("dtm_rp.tif")
-    return gdal.Open(path)
+    path = map2loop_dir('hamersley') / "dtm_rp.tif"
+    return gdal.Open(str(path))
diff --git a/map2loop/data_checks.py b/map2loop/data_checks.py
index 6c7af1db..732464d8 100644
--- a/map2loop/data_checks.py
+++ b/map2loop/data_checks.py
@@ -41,7 +41,7 @@ def check_geology_fields_validity(mapdata) -> tuple[bool, str]:
     # 2. Validate geometry
     failed, message = validate_geometry(
         geodata=geology_data,
-        expected_geom_types=[shapely.Polygon, shapely.MultiPolygon],
+        expected_geom_types=[shapely.geometry.Polygon, shapely.geometry.MultiPolygon],
         datatype_name="GEOLOGY"
     )
     if failed:
diff --git a/map2loop/map2model_wrapper.py b/map2loop/map2model_wrapper.py
index 08738e60..115b8702 100644
--- a/map2loop/map2model_wrapper.py
+++ b/map2loop/map2model_wrapper.py
@@ -2,14 +2,9 @@
 from .m2l_enums import VerboseLevel
 
 # external imports
-import map2model
-import pandas
-import numpy
 import geopandas as gpd
 import pandas as pd
 import numpy as np
-import os
-import re
 
 from .logging import getLogger
 
@@ -37,7 +32,7 @@ class Map2ModelWrapper:
     """
 
     def __init__(
-        self, map_data, mode: str = 'geopandas', verbose_level: VerboseLevel = VerboseLevel.NONE
+        self, map_data, *, verbose_level: VerboseLevel = VerboseLevel.NONE
     ):
         """
         The initialiser for the map2model wrapper
@@ -48,7 +43,6 @@ def __init__(
             verbose_level (VerboseLevel, optional): How much console output is sent. Defaults to VerboseLevel.ALL.
 
         """
-        self.mode = mode
        self.sorted_units = None
        self._fault_fault_relationships = None
        self._unit_fault_relationships = None
@@ -60,28 +54,22 @@ def __init__(
     @property
     def fault_fault_relationships(self):
         if self._fault_fault_relationships is None:
-            if self.mode == 'geopandas':
-                self._calculate_fault_fault_relationships()
-            else:
-                self.run()
+            self._calculate_fault_fault_relationships()
+
         return self._fault_fault_relationships
 
     @property
     def unit_fault_relationships(self):
         if self._unit_fault_relationships is None:
-            if self.mode == 'geopandas':
-                self._calculate_fault_unit_relationships()
-            else:
-                self.run()
+            self._calculate_fault_unit_relationships()
+
         return self._unit_fault_relationships
 
     @property
     def unit_unit_relationships(self):
         if self._unit_unit_relationships is None:
-            if self.mode == 'geopandas':
-                self._calculate_unit_unit_relationships()
-            else:
-                self.run()
+            self._calculate_unit_unit_relationships()
+
         return self._unit_unit_relationships
 
     def reset(self):
@@ -101,12 +89,8 @@ def get_sorted_units(self):
         Returns:
             list: The map2model stratigraphic column estimate
         """
-        if self.mode == 'geopandas':
-            raise NotImplementedError("This method is not implemented")
-        else:
-            if self.sorted_units is None:
-                self.run()
-            return self.sorted_units
+        raise NotImplementedError("This method is not implemented")
+
 
     def get_fault_fault_relationships(self):
         """
@@ -199,194 +183,8 @@ def run(self, verbose_level: VerboseLevel = None):
             verbose_level (VerboseLevel, optional): How much console output is sent. Defaults to None (which uses the wrapper attribute).
""" - if self.mode == 'geopandas': - self.get_fault_fault_relationships() - self.get_unit_fault_relationships() - self.get_unit_unit_relationships() - return - else: - - if verbose_level is None: - verbose_level = self.verbose_level - logger.info("Exporting map data for map2model") - self.map_data.export_wkt_format_files() - logger.info("Running map2model...") - - map2model_code_map = { - "o": "ID", # FIELD_COORDINATES - "f": "FEATURE", # FIELD_FAULT_ID - "u": "CODE", # FIELD_POLYGON_LEVEL1_NAME - "g": "GROUP", # FIELD_POLYGON_LEVEL2_NAME - "min": "MIN_AGE", # FIELD_POLYGON_MIN_AGE - "max": "MAX_AGE", # FIELD_POLYGON_MAX_AGE - "c": "UNITNAME", # FIELD_POLYGON_CODE - "ds": "DESCRIPTION", # FIELD_POLYGON_DESCRIPTION - "r1": "ROCKTYPE1", # FIELD_POLYGON_ROCKTYPE1 - "r2": "ROCKTYPE2", # FIELD_POLYGON_ROCKTYPE2 - "msc": "", # FIELD_SITE_CODE - "mst": "", # FIELD_SITE_TYPE - "mscm": "", # FIELD_SITE_COMMO - "fold": self.map_data.config.fold_config["fold_text"], # FAULT_AXIAL_FEATURE_NAME - "sill": self.map_data.config.geology_config["sill_text"], # SILL_STRING - "intrusive": self.map_data.config.geology_config[ - "intrusive_text" - ], # IGNEOUS_STRING - "volcanic": self.map_data.config.geology_config["volcanic_text"], # VOLCANIC_STRING - "deposit_dist": 100, # deposit_dist - } - logger.info(f"map2model params: {map2model_code_map}") - # TODO: Simplify. Note: this is external so have to match fix to map2model module - logger.info(os.path.join(self.map_data.map2model_tmp_path, "map2model_data")) - logger.info( - os.path.join(self.map_data.map2model_tmp_path, "map2model_data", "geology_wkt.csv") - ) - logger.info( - os.path.join(self.map_data.map2model_tmp_path, "map2model_data", "faults_wkt.csv") - ) - logger.info(self.map_data.get_bounding_box()) - logger.info(map2model_code_map) - logger.info(verbose_level == VerboseLevel.NONE) - - run_log = map2model.run( - os.path.join(self.map_data.map2model_tmp_path), - os.path.join(self.map_data.map2model_tmp_path, "geology_wkt.csv"), - os.path.join(self.map_data.map2model_tmp_path, "faults_wkt.csv"), - "", - self.map_data.get_bounding_box(), - map2model_code_map, - verbose_level == VerboseLevel.NONE, - "None", - ) - # Parse fault intersections - out = [] - fault_fault_intersection_filename = os.path.join( - self.map_data.map2model_tmp_path, 'fault-fault-intersection.txt' - ) - logger.info(f"Reading fault-fault intersections from {fault_fault_intersection_filename}") - if ( - os.path.isfile(fault_fault_intersection_filename) - and os.path.getsize(fault_fault_intersection_filename) > 0 - ): - df = pandas.read_csv(fault_fault_intersection_filename, delimiter="{", header=None) - df[1] = list(df[1].str.replace("}", "", regex=False)) - df[1] = [re.findall("\(.*?\)", i) for i in df[1]] # Valid escape for regex - df[0] = list(df[0].str.replace("^[0-9]*, ", "", regex=True)) - df[0] = list(df[0].str.replace(", ", "", regex=False)) - - # df[0] = "Fault_" + df[0] #removed 7/10/24 as it seems to break the merge in - relations = df[1] - for j in range(len(relations)): - relations[j] = [i.strip("()").replace(" ", "").split(",") for i in relations[j]] - df[1] = relations - - for _, row in df.iterrows(): - for i in numpy.arange(len(row[1])): - - out += [[row[0], row[1][i][0], row[1][i][1], float(row[1][i][2])]] - - else: - logger.warning( - f"Fault-fault intersections file {fault_fault_intersection_filename} not found" - ) - logger.info("Parsing map2model output") - logger.info(run_log) - - logger.info("map2model complete") - - # Parse units sorted - units_sorted = 
-                os.path.join(self.map_data.map2model_tmp_path, "units_sorted.txt"),
-                header=None,
-                sep=' ',
-            )
-            if units_sorted.shape == 0:
-                self.sorted_units = []
-            else:
-                self.sorted_units = list(units_sorted[5])
-
-            # Parse fault intersections
-            out = []
-            fault_fault_intersection_filename = os.path.join(
-                self.map_data.map2model_tmp_path, "fault-fault-intersection.txt"
-            )
-            logger.info(
-                f"Reading fault-fault intersections from {fault_fault_intersection_filename}"
-            )
-            if (
-                os.path.isfile(fault_fault_intersection_filename)
-                and os.path.getsize(fault_fault_intersection_filename) > 0
-            ):
-                df = pandas.read_csv(fault_fault_intersection_filename, delimiter="{", header=None)
-                df[1] = list(df[1].str.replace("}", "", regex=False))
-                df[1] = [re.findall("\(.*?\)", i) for i in df[1]]  # Valid escape for regex
-                df[0] = list(df[0].str.replace("^[0-9]*, ", "", regex=True))
-                df[0] = list(df[0].str.replace(", ", "", regex=False))
-                # df[0] = "Fault_" + df[0] #removed 7/10/24 as it seems to break the merge in
-                relations = df[1]
-                for j in range(len(relations)):
-                    relations[j] = [i.strip("()").replace(" ", "").split(",") for i in relations[j]]
-                df[1] = relations
-
-                for _, row in df.iterrows():
-                    for i in numpy.arange(len(row[1])):
-                        out += [[row[0], row[1][i][0], row[1][i][1], float(row[1][i][2])]]
-
-            else:
-                logger.warning(
-                    f"Fault-fault intersections file {fault_fault_intersection_filename} not found"
-                )
-
-            df_out = pandas.DataFrame(columns=["Fault1", "Fault2", "Type", "Angle"], data=out)
-            logger.info('Fault intersections')
-            logger.info(df_out.to_string())
-            self.fault_fault_relationships = df_out
-
-            # Parse unit fault relationships
-            out = []
-            unit_fault_intersection_filename = os.path.join(
-                self.map_data.map2model_tmp_path, "unit-fault-intersection.txt"
-            )
-            if (
-                os.path.isfile(unit_fault_intersection_filename)
-                and os.path.getsize(unit_fault_intersection_filename) > 0
-            ):
-                df = pandas.read_csv(unit_fault_intersection_filename, header=None, sep='{')
-                df[1] = list(df[1].str.replace("}", "", regex=False))
-                df[1] = df[1].astype(str).str.split(", ")
-                df[0] = list(df[0].str.replace("^[0-9]*, ", "", regex=True))
-                df[0] = list(df[0].str.replace(", ", "", regex=False))
-
-                for _, row in df.iterrows():
-                    for i in numpy.arange(len(row[1])):
-                        out += [[row[0], "Fault_" + row[1][i]]]
-
-            df_out = pandas.DataFrame(columns=["Unit", "Fault"], data=out)
-            self.unit_fault_relationships = df_out
-
-            # Parse unit unit relationships
-            units = []
-            links = []
-            graph_filename = os.path.join(
-                self.map_data.map2model_tmp_path, "graph_all_None.gml.txt"
-            )
-            if os.path.isfile(graph_filename) and os.path.getsize(graph_filename) > 0:
-                with open(
-                    os.path.join(self.map_data.map2model_tmp_path, "graph_all_None.gml.txt")
-                ) as file:
-                    contents = file.read()
-                segments = contents.split("\n\n")
-                for line in segments[0].split("\n"):
-                    units += [line.split(" ")]
-                for line in segments[1].split("\n")[:-1]:
-                    links += [line.split(" ")]
-
-            df = pandas.DataFrame(columns=["index", "unit"], data=units)
-            df.set_index("index", inplace=True)
-            out = []
-            for row in links:
-                out += [[int(row[0]), df["unit"][row[0]], int(row[1]), df["unit"][row[1]]]]
-            df_out = pandas.DataFrame(
-                columns=["Index1", "UnitName1", "Index2", "UnitName2"], data=out
-            )
-            self.unit_unit_relationships = df_out
+        self.get_fault_fault_relationships()
+        self.get_unit_fault_relationships()
+        self.get_unit_unit_relationships()
+
diff --git a/map2loop/mapdata.py b/map2loop/mapdata.py
index b986258c..4137af27 100644
--- a/map2loop/mapdata.py
+++ b/map2loop/mapdata.py
@@ -53,7 +53,7 @@ class MapData:
         A string containing the projection e.g. "EPSG:28350"
     bounding_box: dict
         The bounding box in cartesian coordinates with 6 elements
-    bounding_box_polygon: shapely.Polygon
+    bounding_box_polygon: shapely.geometry.Polygon
         The bounding box in polygonal form
     bounding_box_str: str
         The bounding box in string form (used for url requests)
@@ -183,7 +183,7 @@ def set_bounding_box(self, bounding_box):
         self.bounding_box_polygon = geopandas.GeoDataFrame(
             index=[0],
             crs=self.working_projection,
-            geometry=[shapely.Polygon(zip(lon_point_list, lat_point_list))],
+            geometry=[shapely.geometry.Polygon(zip(lon_point_list, lat_point_list))],
         )
         self.recreate_bounding_box_str()
 
@@ -210,7 +210,7 @@ def get_bounding_box(self, polygon: bool = False):
             polygon (bool, optional): Flag to get the bounding box in polygon form. Defaults to False.
 
         Returns:
-            dict or shapely.Polygon: The bounding box in the requested form
+            dict or shapely.geometry.Polygon: The bounding box in the requested form
         """
         if polygon:
             return self.bounding_box_polygon
diff --git a/map2loop/thickness_calculator.py b/map2loop/thickness_calculator.py
index eb8a2a67..d7a9aad1 100644
--- a/map2loop/thickness_calculator.py
+++ b/map2loop/thickness_calculator.py
@@ -23,7 +23,7 @@
 import geopandas
 import shapely
 import math
-
+from shapely.errors import UnsupportedGEOSVersionError
 
 class ThicknessCalculator(ABC):
     """
@@ -274,7 +274,7 @@ def compute(
         contacts = map_data.get_value_from_raster_df(Datatype.DTM, contacts)
         # update the geometry of the contact points to include the Z value
         contacts["geometry"] = contacts.apply(
-            lambda row: shapely.Point(row.geometry.x, row.geometry.y, row["Z"]), axis=1
+            lambda row: shapely.geometry.Point(row.geometry.x, row.geometry.y, row["Z"]), axis=1
         )
         # spatial join the contact points with the basal contacts to get the unit for each contact point
         contacts = contacts.sjoin(basal_contacts, how="inner", predicate="intersects")
@@ -302,7 +302,7 @@ def compute(
         interpolated = map_data.get_value_from_raster_df(Datatype.DTM, interpolated_orientations)
         # update the geometry of the interpolated points to include the Z value
         interpolated["geometry"] = interpolated.apply(
-            lambda row: shapely.Point(row.geometry.x, row.geometry.y, row["Z"]), axis=1
+            lambda row: shapely.geometry.Point(row.geometry.x, row.geometry.y, row["Z"]), axis=1
         )
         # for each interpolated point, assign name of unit using spatial join
         units = map_data.get_map_data(Datatype.GEOLOGY)
@@ -365,7 +365,11 @@ def compute(
                 # calculate the length of the shortest line
                 line_length = scipy.spatial.distance.euclidean(p1, p2)
                 # find the indices of the points that are within 5% of the length of the shortest line
-                indices = shapely.dwithin(short_line, interp_points, line_length * 0.25)
+                try:
+                    # GEOS 3.10.0+
+                    indices = shapely.dwithin(short_line, interp_points, line_length * 0.25)
+                except UnsupportedGEOSVersionError:
+                    indices= numpy.array([shapely.distance(short_line[0],point)<= (line_length * 0.25) for point in interp_points])
                 # get the dip of the points that are within
                 _dip = numpy.deg2rad(dip[indices])
                 _dips.append(_dip)
@@ -531,7 +535,7 @@ def compute(
             # make a shapely point from the measurement
             measurement = sampled_structures.iloc[s]
-            measurement_pt = shapely.Point(measurement.X, measurement.Y)
+            measurement_pt = shapely.geometry.Point(measurement.X, measurement.Y)
 
             # find unit and strike
             litho_in = measurement['unit_name']
diff --git a/map2loop/utils.py b/map2loop/utils.py
index 00e8fa57..c3ed7795 100644
--- a/map2loop/utils.py
+++ b/map2loop/utils.py
@@ -130,7 +130,7 @@ def create_points(xy: Union[list, tuple, numpy.ndarray]) -> numpy.ndarray:
             where each coordinate contains two elements representing the x and y coordinates of a point.
 
     Returns:
-        shapely.points: A list of Point objects created from the input list of coordinates.
+        shapely.geometry.Point: A list of Point objects created from the input list of coordinates.
     """
     points = shapely.points(xy)
     return points
@@ -138,7 +138,7 @@ def create_points(xy: Union[list, tuple, numpy.ndarray]) -> numpy.ndarray:
 
 @beartype.beartype
 def find_segment_strike_from_pt(
-    line: shapely.LineString, point: shapely.Point, measurement: pandas.Series
+    line: shapely.geometry.LineString, point: shapely.geometry.Point, measurement: pandas.Series
 ) -> float:
     """
     Finds the strike of a line segment (contact) closest to a given point (structural measurement).
@@ -154,7 +154,7 @@ def find_segment_strike_from_pt(
 
     lines = []
     for c1, c2 in zip(line.coords, line.coords[1:]):
-        lines.append(shapely.LineString([c1, c2]))
+        lines.append(shapely.geometry.LineString([c1, c2]))
 
     distances = [segment.distance(point) for segment in lines]
    nearest_line = lines[distances.index(min(distances))]
@@ -183,7 +183,7 @@ def find_segment_strike_from_pt(
 
 @beartype.beartype
 def calculate_endpoints(
-    start_point: shapely.Point, azimuth_deg: float, distance: int, bbox: pandas.DataFrame
+    start_point: shapely.geometry.Point, azimuth_deg: float, distance: int, bbox: pandas.DataFrame
 ) -> shapely.geometry.LineString:
     """
     Calculate the endpoints of a line segment given a start point, azimuth angle, distance, and bounding box.
@@ -195,7 +195,7 @@ def calculate_endpoints(
         bbox (dict): The bounding box coordinates (minx, miny, maxx, maxy).
 
     Returns:
-        shapely.LineString: A LineString object representing the line segment with endpoints clipped by the bounding box.
+        shapely.geometry.LineString: A LineString object representing the line segment with endpoints clipped by the bounding box.
     """
     bbox = numpy.array(bbox)[0]
     minx, miny, maxx, maxy = bbox[0], bbox[1], bbox[2], bbox[3]
@@ -216,7 +216,7 @@ def calculate_endpoints(
     dy_left = distance * math.sin(left_azimuth_rad)
     left_endpoint = (x + dx_left, y + dy_left)
 
-    line = shapely.LineString([left_endpoint, right_endpoint])
+    line = shapely.geometry.LineString([left_endpoint, right_endpoint])
 
     new_line = shapely.ops.clip_by_rect(line, minx, miny, maxx, maxy)
 
@@ -236,11 +236,11 @@ def multiline_to_line(
 
     Returns:
         LineString: The converted line geometry.
""" - if isinstance(geometry, shapely.LineString): + if isinstance(geometry, shapely.geometry.LineString): return geometry coords = [list(part.coords) for part in geometry.geoms] - flat_coords = [shapely.Point(*point) for segment in coords for point in segment] - return shapely.LineString(flat_coords) + flat_coords = [shapely.geometry.Point(*point) for segment in coords for point in segment] + return shapely.geometry.LineString(flat_coords) @beartype.beartype @@ -280,7 +280,7 @@ def rebuild_sampled_basal_contacts( if len(unique_segments) == 1: # make a linestring with all the points in subset - line = shapely.LineString(subset.geometry) + line = shapely.geometry.LineString(subset.geometry) r.append(line) else: @@ -289,12 +289,12 @@ def rebuild_sampled_basal_contacts( for featureId in unique_segments: seg_subset = subset[subset['featureId'] == featureId] if len(seg_subset) > 1: # Ensure each segment has at least two points - line_ = shapely.LineString(seg_subset.geometry.tolist()) + line_ = shapely.geometry.LineString(seg_subset.geometry.tolist()) lines.append(line_) # If multiple lines were created, combine them into a MultiLineString if lines: - line = shapely.MultiLineString(lines) + line = shapely.geometry.MultiLineString(lines) r.append(line) sampled_basal_contacts = geopandas.GeoDataFrame( @@ -380,7 +380,7 @@ def hex_to_rgb(hex_color: str) -> tuple: @beartype.beartype def calculate_minimum_fault_length( - bbox: dict[str, int | float], area_percentage: float + bbox: dict[str, Union[int, float]], area_percentage: float ) -> float: """ @@ -439,7 +439,7 @@ def read_hjson_with_json(file_path: str) -> dict: raise FileNotFoundError(f"HJSON file not found: {file_path}") from e except json.JSONDecodeError as e: raise ValueError(f"Failed to decode preprocessed HJSON as JSON: {e}") from e - + @beartype.beartype def update_from_legacy_file( filename: str, @@ -527,4 +527,4 @@ def update_from_legacy_file( with open(json_save_path, "w") as f: json.dump(parsed_data, f, indent=4) - return file_map \ No newline at end of file + return file_map diff --git a/tests/mapdata/test_mapdata_dipdir.py b/tests/mapdata/test_mapdata_dipdir.py index cc156253..e6107aea 100644 --- a/tests/mapdata/test_mapdata_dipdir.py +++ b/tests/mapdata/test_mapdata_dipdir.py @@ -28,7 +28,7 @@ def test_if_m2l_returns_all_sampled_structures_with_DIPDIR_lower_than_360(): # create mock data data = { - 'geometry': [shapely.Point(1, 1), shapely.Point(2, 2), shapely.Point(3, 3)], + 'geometry': [shapely.geometry.Point(1, 1), shapely.geometry.Point(2, 2), shapely.geometry.Point(3, 3)], 'DIPDIR': [45.0, 370.0, 420.0], 'DIP': [30.0, 60.0, 50], 'OVERTURNED': ["False", "True", "True"], diff --git a/tests/mapdata/test_set_get_recreate_bounding_box.py b/tests/mapdata/test_set_get_recreate_bounding_box.py index 5a6e9368..6e131c39 100644 --- a/tests/mapdata/test_set_get_recreate_bounding_box.py +++ b/tests/mapdata/test_set_get_recreate_bounding_box.py @@ -48,7 +48,7 @@ def test_bounding_box_polygon(md): expected_polygon = geopandas.GeoDataFrame( index=[0], crs=md.working_projection, - geometry=[shapely.Polygon(zip(lon_point_list, lat_point_list))], + geometry=[shapely.geometry.Polygon(zip(lon_point_list, lat_point_list))], ) assert md.bounding_box_polygon.equals( diff --git a/tests/sampler/test_SamplerSpacing.py b/tests/sampler/test_SamplerSpacing.py index e69c7650..017f2467 100644 --- a/tests/sampler/test_SamplerSpacing.py +++ b/tests/sampler/test_SamplerSpacing.py @@ -18,10 +18,10 @@ def sampler_spacing(): def correct_geodata(): data = { 
         'geometry': [
-            shapely.LineString([(0, 0), (1, 1), (2, 2)]),
-            shapely.Polygon([(0, 0), (1, 1), (1, 0), (0, 0)]),
-            shapely.MultiLineString(
-                [shapely.LineString([(0, 0), (1, 1)]), shapely.LineString([(2, 2), (3, 3)])]
+            shapely.geometry.LineString([(0, 0), (1, 1), (2, 2)]),
+            shapely.geometry.Polygon([(0, 0), (1, 1), (1, 0), (0, 0)]),
+            shapely.geometry.MultiLineString(
+                [shapely.geometry.LineString([(0, 0), (1, 1)]), shapely.geometry.LineString([(2, 2), (3, 3)])]
             ),
         ],
         'ID': ['1', '2', '3'],
@@ -31,7 +31,7 @@ def correct_geodata():
 
 @pytest.fixture
 def incorrect_geodata():
-    data = {'geometry': [shapely.Point(0, 0), "Not a geometry"], 'ID': ['1', '2']}
+    data = {'geometry': [shapely.geometry.Point(0, 0), "Not a geometry"], 'ID': ['1', '2']}
     return pandas.DataFrame(data)
 
 
@@ -54,7 +54,7 @@ def test_sample_function_incorrect_data(sampler_spacing, incorrect_geodata):
 def test_sample_function_target_less_than_or_equal_to_2():
     sampler_spacing = SamplerSpacing(spacing=1.0)
     data = {
-        'geometry': [shapely.LineString([(0, 0), (0, 1)]), shapely.LineString([(0, 0), (1, 0)])],
+        'geometry': [shapely.geometry.LineString([(0, 0), (0, 1)]), shapely.geometry.LineString([(0, 0), (1, 0)])],
         'ID': ['1', '2'],
     }
     gdf = geopandas.GeoDataFrame(data, geometry='geometry')
diff --git a/tests/sampler/test_SamplerSpacing_featureId.py b/tests/sampler/test_SamplerSpacing_featureId.py
index a84df370..73faaa3e 100644
--- a/tests/sampler/test_SamplerSpacing_featureId.py
+++ b/tests/sampler/test_SamplerSpacing_featureId.py
@@ -30,7 +30,7 @@ def test_featureId():
 
     # check if in the right place
     for _, sample in corresponding_rows.iterrows():
-        point = shapely.Point(sample['X'], sample['Y']).buffer(1)
+        point = shapely.geometry.Point(sample['X'], sample['Y']).buffer(1)
         assert point.intersects(
             poly.geometry
         ), f"Point from featureId 0 is not in the correct polygon segment of ID {poly['ID']}."
@@ -48,7 +48,7 @@ def test_featureId():
         polygon_samples = corresponding_rows[corresponding_rows['featureId'] == str(i)]
         print(polygon_samples)
         for _, sample in polygon_samples.iterrows():
-            point = shapely.Point(sample['X'], sample['Y']).buffer(
+            point = shapely.geometry.Point(sample['X'], sample['Y']).buffer(
                 1
             )  # buffer just to make sure
             assert point.intersects(