diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9316e23cb..985b17d33 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,8 @@
 - formally support Python 3.12 (#1082)
 - fix Windows-specific character encoding issue when reading XML files (#1084)
+- resolve pandas and gdal future warnings (#1089)
+- use spawn instead of fork for multiprocessing to resolve Python 3.12 deprecation warning (#1089)
 - rename add_node_elevations_google function's max_locations_per_batch parameter, with deprecation warning (#1088)
 - move add_node_elevations_google function's url_template parameter to settings module, with deprecation warning (#1088)
diff --git a/osmnx/elevation.py b/osmnx/elevation.py
index cecf23cf9..c2c014cb3 100644
--- a/osmnx/elevation.py
+++ b/osmnx/elevation.py
@@ -139,6 +139,7 @@ def add_node_elevations_raster(G, filepath, band=1, cpus=None):
         filepaths = [str(p) for p in filepath]
         sha = sha1(str(filepaths).encode("utf-8")).hexdigest()
         filepath = f"./.osmnx_{sha}.vrt"
+        gdal.UseExceptions()
         gdal.BuildVRT(filepath, filepaths).FlushCache()
 
     nodes = utils_graph.graph_to_gdfs(G, edges=False, node_geometry=False)[["x", "y"]]
@@ -148,11 +149,8 @@
         # divide nodes into equal-sized chunks for multiprocessing
         size = int(np.ceil(len(nodes) / cpus))
         args = ((nodes.iloc[i : i + size], filepath, band) for i in range(0, len(nodes), size))
-        pool = mp.Pool(cpus)
-        sma = pool.starmap_async(_query_raster, args)
-        results = sma.get()
-        pool.close()
-        pool.join()
+        with mp.get_context("spawn").Pool(cpus) as pool:
+            results = pool.starmap_async(_query_raster, args).get()
         elevs = {k: v for kv in results for k, v in kv}
 
     assert len(G) == len(elevs)
diff --git a/osmnx/features.py b/osmnx/features.py
index d47808fd9..2cb4b663a 100644
--- a/osmnx/features.py
+++ b/osmnx/features.py
@@ -258,7 +258,7 @@ def features_from_place(query, tags, which_result=None, buffer_dist=None):
     """
     if buffer_dist is not None:
         warn(
-            "The buffer_dist argument as been deprecated and will be removed "
+            "The buffer_dist argument has been deprecated and will be removed "
             "in a future release. Buffer your query area directly, if desired.",
             stacklevel=2,
         )
diff --git a/osmnx/geocoder.py b/osmnx/geocoder.py
index 6064dc96d..02d29ff1a 100644
--- a/osmnx/geocoder.py
+++ b/osmnx/geocoder.py
@@ -100,7 +100,7 @@ def geocode_to_gdf(query, which_result=None, by_osmid=False, buffer_dist=None):
     """
     if buffer_dist is not None:
         warn(
-            "The buffer_dist argument as been deprecated and will be removed "
+            "The buffer_dist argument has been deprecated and will be removed "
             "in a future release. Buffer your results directly, if desired.",
             stacklevel=2,
         )
diff --git a/osmnx/graph.py b/osmnx/graph.py
index e7241a774..2ace60637 100644
--- a/osmnx/graph.py
+++ b/osmnx/graph.py
@@ -348,7 +348,7 @@ def graph_from_place(
     """
     if buffer_dist is not None:
         warn(
-            "The buffer_dist argument as been deprecated and will be removed "
+            "The buffer_dist argument has been deprecated and will be removed "
             "in a future release. Buffer your query area directly, if desired.",
             stacklevel=2,
         )
diff --git a/osmnx/routing.py b/osmnx/routing.py
index 3b23ac974..42811a0f0 100644
--- a/osmnx/routing.py
+++ b/osmnx/routing.py
@@ -75,11 +75,8 @@ def shortest_path(G, orig, dest, weight="length", cpus=1):
 
         # if multi-threading, calculate shortest paths in parallel
         else:
             args = ((G, o, d, weight) for o, d in zip(orig, dest))
-            pool = mp.Pool(cpus)
-            sma = pool.starmap_async(_single_shortest_path, args)
-            paths = sma.get()
-            pool.close()
-            pool.join()
+            with mp.get_context("spawn").Pool(cpus) as pool:
+                paths = pool.starmap_async(_single_shortest_path, args).get()
 
         return paths
diff --git a/osmnx/simplification.py b/osmnx/simplification.py
index a2c457bba..e5e85d53e 100644
--- a/osmnx/simplification.py
+++ b/osmnx/simplification.py
@@ -505,10 +505,11 @@ def _consolidate_intersections_rebuild_graph(G, tolerance=10, reconnect_edges=True):
     # STEP 2
     # attach each node to its cluster of merged nodes. first get the original
     # graph's node points then spatial join to give each node the label of
-    # cluster it's within
+    # cluster it's within. make cluster labels type string.
     node_points = utils_graph.graph_to_gdfs(G, edges=False)[["geometry"]]
     gdf = gpd.sjoin(node_points, node_clusters, how="left", predicate="within")
     gdf = gdf.drop(columns="geometry").rename(columns={"index_right": "cluster"})
+    gdf["cluster"] = gdf["cluster"].astype(str)
 
     # STEP 3
     # if a cluster contains multiple components (i.e., it's not connected)
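Note on the recurring pool change in elevation.py and routing.py above: a manually managed mp.Pool is replaced by a pool created from an explicit "spawn" context and used as a context manager, which is what the changelog entry about the Python 3.12 deprecation warning refers to. Below is a minimal standalone sketch of that pattern, not osmnx code; the worker function _multiply and its inputs are invented for illustration.

# sketch of the spawn-context pool pattern adopted in this diff (toy worker, not osmnx's)
import multiprocessing as mp


def _multiply(x, y):
    # stand-in for a real worker such as _query_raster or _single_shortest_path
    return x * y


if __name__ == "__main__":
    # the __main__ guard matters under spawn: child processes re-import this module
    args = ((i, i + 1) for i in range(8))
    with mp.get_context("spawn").Pool(2) as pool:
        # the with-block takes over the explicit pool.close()/pool.join() calls removed above
        results = pool.starmap_async(_multiply, args).get()
    print(results)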
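And a short sketch of the GDAL change in elevation.py: gdal.UseExceptions() opts into exception-based error handling before the VRT is built, which presumably addresses the "gdal future warnings" changelog entry about GDAL's implicit error-handling default. The file paths below are placeholders, not osmnx's hashed .vrt naming scheme.

from osgeo import gdal

gdal.UseExceptions()  # raise Python exceptions rather than relying on GDAL's legacy default
tile_paths = ["tile_a.tif", "tile_b.tif"]  # hypothetical input rasters
# compose the tiles into a single virtual raster and flush it to disk
gdal.BuildVRT("composite.vrt", tile_paths).FlushCache()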