Skip to content

Commit

Permalink
Docstring improvements (#245)
Browse files Browse the repository at this point in the history
* Improving docstring formatting

* Improving docstring formatting

* Added pre-commit to the dev-requirements

* Pre-commit fixes

* Adding specificity to typehint

* Improved docstring (Google Napoleon style) and typehints (pyright driven)

* The positional argument needs to stay

* Adding back unused var (required for dependency injection)

* Docstring improvements

* Adding a single period

* Cleaning up docstrings and some minor syntax

* Cleaning up docstrings, switching to import of numpy typing to avoid pyright issues

* Improving docstrings

* All minor docstring improvements

* Adding pydocstyle and docstring-quotes to Ruff config

* Update zarr.py

* Pre-commit w/ Ruff PyDocStyle

* Adding module level docstring

* Fixing pydocstyle errors

* Fixing docstring formatting for Ruff

* Docstring fixes for Ruff

* Docstring fixes for Ruff

* Setting up Ruff formatting (excluding docs/ and tests/)

* Docstring fixes for Ruff pydocstyle

* Fixed non-initialized plugin exception handling and added a test

* Only accepting List[APIRouter]

* Switching to pytest.raises
  • Loading branch information
xaviernogueira authored Dec 6, 2023
1 parent 3a4a047 commit e791d2b
Show file tree
Hide file tree
Showing 19 changed files with 456 additions and 241 deletions.
1 change: 1 addition & 0 deletions dev-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ numcodecs
numpy
pluggy
pooch
pre-commit
pytest
pytest-mock
pytest-sugar
Expand Down
16 changes: 16 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ skip-string-normalization = true
select = [
"B", # flake8-bugbear
"C",
'D', # pydocstyle
"E", # pycodestyle
"F", # Pyflakes
"I", # isort
Expand All @@ -69,6 +70,9 @@ select = [
"B9",
]
ignore = [
"D100", # Missing docstring in public module
"D107", # Missing docstring in `__init__`
"D104", # Missing docstring in public package
# "E203",
# "E266",
"E501",
Expand All @@ -78,6 +82,10 @@ ignore = [
"C901",
]
line-length = 100
exclude = [
"tests/",
"docs/",
]

[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
Expand Down Expand Up @@ -105,6 +113,14 @@ known-third-party = [
"zarr",
]

[tool.ruff.lint.pydocstyle]
# Use Google style docstrings
convention = "google"

[tool.ruff.lint.flake8-quotes]
inline-quotes = "single"
docstring-quotes = "double"

[tool.ruff.flake8-bugbear]
# Allow fastapi.Depends and other dependency injection style function arguments
extend-immutable-calls = ["fastapi.Depends", "fastapi.Query", "fastapi.Path"]
Expand Down
17 changes: 15 additions & 2 deletions tests/test_rest_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def get_dims(dataset: xr.Dataset = Depends(get_dataset)):


@pytest.fixture(scope='function')
def dataset_plugin(airtemp_ds):
def uninitialized_dataset_plugin(airtemp_ds):
class AirtempPlugin(Plugin):
name: str = 'airtemp'

Expand All @@ -75,7 +75,12 @@ def get_dataset(self, dataset_id: str):
def get_datasets(self):
return ['airtemp']

return AirtempPlugin()
return AirtempPlugin


@pytest.fixture(scope='function')
def dataset_plugin(uninitialized_dataset_plugin):
    """Instantiated plugin built from the class that ``uninitialized_dataset_plugin`` provides."""
    return uninitialized_dataset_plugin()


@pytest.fixture(scope='function')
Expand Down Expand Up @@ -199,6 +204,14 @@ def test_custom_dataset_plugin(airtemp_ds, dataset_plugin):
assert list(json_response['variables'].keys()) == list(airtemp_ds.variables.keys())


def test_uninitialized_plugin(uninitialized_dataset_plugin):
    """Registering a plugin class (not an instance) raises a descriptive AttributeError."""
    rest = Rest({})

    # The fixture hands over the *class* itself; register_plugin should
    # reject it with an AttributeError whose message mentions 'Plugin'.
    expected_error = pytest.raises(AttributeError, match='Plugin')
    with expected_error:
        rest.register_plugin(uninitialized_dataset_plugin)


def test_custom_plugin_hooks_register(hook_spec_plugin, hook_implementation_plugin):
rest = Rest({})
rest.register_plugin(hook_implementation_plugin)
Expand Down
5 changes: 1 addition & 4 deletions tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,7 @@


class TestMapper(TestClient, BaseStore):
"""
A simple subclass to support getitem syntax on Starlette TestClient Objects
"""
"""A simple subclass to support getitem syntax on Starlette TestClient Objects"""

def __getitem__(self, key):
zarr_key = f'/zarr/{key}'
Expand Down Expand Up @@ -49,7 +47,6 @@ def create_dataset(
use_xy_dim=False,
):
"""Utility function for creating test data"""

if use_cftime:
end = xr.coding.cftime_offsets.to_cftime_datetime(end, calendar=calendar)
dates = xr.cftime_range(start=start, end=end, freq=freq, calendar=calendar)
Expand Down
1 change: 1 addition & 0 deletions xpublish/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
"""Publish a Xarray Dataset through a rest API."""
try:
import importlib.metadata as importlib_metadata
except ImportError:
Expand Down
30 changes: 9 additions & 21 deletions xpublish/accessor.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,7 @@

@xr.register_dataset_accessor('rest')
class RestAccessor:
"""REST API Accessor for serving one dataset in its
dedicated FastAPI application.
"""
"""REST API Accessor for serving one dataset via a dedicated FastAPI app."""

def __init__(self, xarray_obj):
self._obj = xarray_obj
Expand All @@ -27,15 +24,13 @@ def _get_rest_obj(self):
def __call__(self, **kwargs):
"""Initialize this accessor by setting optional configuration values.
Parameters
----------
**kwargs
Arguments passed to :func:`xpublish.SingleDatasetRest.__init__`.
NOTE: This method can only be invoked once.
Notes
-----
This method can only be invoked once.
Args:
**kwargs: Arguments passed to :func:`xpublish.SingleDatasetRest.__init__`.
Returns:
The initialized accessor.
"""
if self._initialized:
raise RuntimeError('This accessor has already been initialized')
Expand All @@ -48,26 +43,19 @@ def __call__(self, **kwargs):
@property
def cache(self) -> cachey.Cache:
"""Returns the :class:`cachey.Cache` instance used by the FastAPI application."""

return self._get_rest_obj().cache

@property
def app(self) -> FastAPI:
"""Returns the :class:`fastapi.FastAPI` application instance."""

return self._get_rest_obj().app

def serve(self, **kwargs):
"""Serve this FastAPI application via :func:`uvicorn.run`.
Parameters
----------
**kwargs :
Arguments passed to :func:`xpublish.SingleDatasetRest.serve`.
Notes
-----
This method is blocking and does not return.
NOTE: This method is blocking and does not return.
Args:
**kwargs: Arguments passed to :func:`xpublish.SingleDatasetRest.serve`.
"""
self._get_rest_obj().serve(**kwargs)
46 changes: 31 additions & 15 deletions xpublish/dependencies.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
"""
Helper functions to use a FastAPI dependencies.
"""
from typing import TYPE_CHECKING, Dict, List
"""Helper functions to use a FastAPI dependencies."""
from typing import (
TYPE_CHECKING,
Dict,
List,
)

import cachey
import pluggy
Expand All @@ -16,8 +18,7 @@


def get_dataset_ids() -> List[str]:
"""FastAPI dependency for getting the list of ids (string keys)
of the collection of datasets being served.
"""FastAPI dependency for getting the list of ids (string keys) of the collection of datasets being served.
Use this callable as dependency in any FastAPI path operation
function where you need access to those ids.
Expand All @@ -27,7 +28,6 @@ def get_dataset_ids() -> List[str]:
Returns:
A list of unique keys for datasets
"""
return [] # pragma: no cover

Expand Down Expand Up @@ -67,10 +67,18 @@ def get_cache() -> cachey.Cache:


def get_zvariables(
dataset: xr.Dataset = Depends(get_dataset), cache: cachey.Cache = Depends(get_cache)
):
"""FastAPI dependency that returns a dictionary of zarr encoded variables."""
dataset: xr.Dataset = Depends(get_dataset),
cache: cachey.Cache = Depends(get_cache),
) -> dict:
"""FastAPI dependency that returns a dictionary of zarr encoded variables.
Args:
dataset: The dataset to get the zvariables from.
cache: The cache to use for storing the zvariables.
Returns:
A dictionary of zarr encoded variables.
"""
cache_key = dataset.attrs.get(DATASET_ID_ATTR_KEY, '') + '/' + 'zvariables'
zvariables = cache.get(cache_key)

Expand All @@ -87,9 +95,17 @@ def get_zmetadata(
dataset: xr.Dataset = Depends(get_dataset),
cache: cachey.Cache = Depends(get_cache),
zvariables: dict = Depends(get_zvariables),
):
"""FastAPI dependency that returns a consolidated zmetadata dictionary."""
) -> dict:
"""FastAPI dependency that returns a consolidated zmetadata dictionary.
Args:
dataset: The dataset to get the zmetadata from.
cache: The cache to use for storing the zmetadata.
zvariables: The zvariables to use for creating the zmetadata.
Returns:
A consolidated zmetadata dictionary.
"""
cache_key = dataset.attrs.get(DATASET_ID_ATTR_KEY, '') + '/' + ZARR_METADATA_KEY
zmeta = cache.get(cache_key)

Expand All @@ -103,14 +119,14 @@ def get_zmetadata(


def get_plugins() -> Dict[str, 'Plugin']:
    """FastAPI dependency that returns a dictionary of loaded plugins.

    Returns:
        Dictionary of names to initialized plugins.
    """
    # Stub body: the real mapping is supplied via FastAPI dependency
    # overrides at application setup time.
    return {}  # pragma: no cover


def get_plugin_manager() -> pluggy.PluginManager:
    """Return the active plugin manager.

    Placeholder dependency — presumably overridden with the real
    :class:`pluggy.PluginManager` via FastAPI dependency injection;
    TODO confirm against the Rest application setup.
    """
    ...
25 changes: 12 additions & 13 deletions xpublish/plugins/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@


class Dependencies(BaseModel):
"""
A set of dependencies that are passed into plugin routers.
"""A set of dependencies that are passed into plugin routers.
Some routers may be 'borrowed' by other routers to expose different
geometries of data, thus the default dependencies may need to be overridden.
Expand Down Expand Up @@ -47,14 +46,12 @@ class Dependencies(BaseModel):
)

def __hash__(self):
"""Dependency functions aren't easy to hash"""
"""Dependency functions aren't easy to hash."""
return 0 # pragma: no cover


class Plugin(BaseModel):
"""
Xpublish plugins provide ways to extend the core of xpublish with
new routers and other functionality.
"""Xpublish plugins provide ways to extend the core of xpublish with new routers and other functionality.
To create a plugin, subclass `Plugin` and add attributes that are
subclasses of `PluginType` (`Router` for instance).
Expand All @@ -66,7 +63,7 @@ class Plugin(BaseModel):
name: str = Field(..., description='Fallback name of plugin')

def __hash__(self):
"""Make sure that the plugin is hashable to load with pluggy"""
"""Make sure that the plugin is hashable to load with pluggy."""
things_to_hash = []

# try/except is for pydantic backwards compatibility
Expand All @@ -84,7 +81,9 @@ def __hash__(self):
return hash(tuple(things_to_hash))

def __dir__(self) -> Iterable[str]:
"""We need to override the dir as pluggy will otherwise try to inspect it,
"""Overrides the dir.
We need to override the dir as pluggy will otherwise try to inspect it,
and Pydantic has marked it class only
https://github.com/pydantic/pydantic/pull/1466
Expand All @@ -97,15 +96,15 @@ def __dir__(self) -> Iterable[str]:


class PluginSpec(Plugin):
"""Plugin extension points
"""Plugin extension points.
Plugins do not need to implement all of the methods defined here,
instead they implement
"""

@hookspec
def app_router(self, deps: Dependencies) -> APIRouter: # type: ignore
"""Create an app (top-level) router for the plugin
"""Create an app (top-level) router for the plugin.
Implementations should return an APIRouter, and define
app_router_prefix, and app_router_tags on the class,
Expand All @@ -114,7 +113,7 @@ def app_router(self, deps: Dependencies) -> APIRouter: # type: ignore

@hookspec
def dataset_router(self, deps: Dependencies) -> APIRouter: # type: ignore
"""Create a dataset router for the plugin
"""Create a dataset router for the plugin.
Implementations should return an APIRouter, and define
dataset_router_prefix, and dataset_router_tags on the class,
Expand All @@ -123,7 +122,7 @@ def dataset_router(self, deps: Dependencies) -> APIRouter: # type: ignore

    @hookspec
    def get_datasets(self) -> Iterable[str]:  # type: ignore
        """Return an iterable of dataset ids that the plugin can provide.

        Hook specification only: the body is intentionally empty, and the
        return annotation documents what implementations should return.
        """

@hookspec(firstresult=True)
# type: ignore
Expand All @@ -135,4 +134,4 @@ def get_dataset(self, dataset_id: str) -> Optional[xr.Dataset]:

    @hookspec
    def register_hookspec(self):  # type: ignore
        """Return additional hookspec class to register with the plugin manager.

        Hook specification only: implementations return the class; this
        stub body is intentionally empty.
        """
Loading

0 comments on commit e791d2b

Please sign in to comment.