Add numpy to mypy check #753

Merged
2 changes: 1 addition & 1 deletion .github/workflows/check-code-quality.yml
@@ -55,7 +55,7 @@ jobs:

       - name: Install and run mypy
         run: |
-          pip install mypy
+          pip install mypy numpy
           mypy .

       - name: Install and run pylint
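Installing numpy alongside mypy matters because numpy ships its own inline type annotations: without the package in the environment, mypy either reports the module as missing or (with `ignore_missing_imports`) silently treats every `np.ndarray` as `Any`, which is what previously hid the issues fixed in the source files below. A minimal sketch of the kind of code the check can now verify (the function is hypothetical, not part of this PR):

```python
import numpy as np


def leading_dimension(arr: np.ndarray) -> int:
    """Return the batch size of a 1-D or 2-D array."""
    # With numpy installed, mypy can verify that `ndim` and `shape` exist
    # and that this function really returns an int.
    return 1 if arr.ndim == 1 else arr.shape[0]
```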
8 changes: 5 additions & 3 deletions .pre-commit-config.yaml
@@ -4,21 +4,23 @@

 repos:
   - repo: https://github.com/fsfe/reuse-tool
-    rev: v3.0.2
+    rev: v4.0.3
     hooks:
       - id: reuse
   - repo: https://github.com/pycqa/isort
     rev: 5.13.2
     hooks:
       - id: isort
   - repo: https://github.com/psf/black
-    rev: 24.4.2
+    rev: 24.8.0
     hooks:
       - id: black-jupyter
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.10.0
+    rev: v1.11.2
     hooks:
       - id: mypy
+        additional_dependencies:
+          - numpy
   - repo: local
     hooks:
       - id: pylint
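Note that the CI change and this pre-commit change are independent: the mirrors-mypy hook runs in its own isolated virtualenv, so the `pip install mypy numpy` in the workflow never reaches it. `additional_dependencies` is pre-commit's mechanism for injecting extra packages into a hook's environment; after this change, `pre-commit run mypy --all-files` should see numpy just like the CI job does.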
8 changes: 0 additions & 8 deletions .reuse/dep5

This file was deleted.

14 changes: 14 additions & 0 deletions REUSE.toml
@@ -0,0 +1,14 @@
+# SPDX-FileCopyrightText: Contributors to the Power Grid Model project <[email protected]>
+#
+# SPDX-License-Identifier: MPL-2.0
+
+version = 1
+SPDX-PackageName = "power-grid-model"
+SPDX-PackageSupplier = "Power Grid Model project <[email protected]>"
+SPDX-PackageDownloadLocation = "https://github.com/PowerGridModel/power-grid-model"
+
+[[annotations]]
+path = "./**"
+precedence = "aggregate"
+SPDX-FileCopyrightText = "Contributors to the Power Grid Model project <[email protected]>"
+SPDX-License-Identifier = "MPL-2.0"
10 changes: 6 additions & 4 deletions src/power_grid_model/_utils.py
@@ -93,7 +93,7 @@ def convert_batch_dataset_to_batch_list(batch_data: BatchDataset) -> BatchList:
         if is_sparse(data):
             component_batches = split_sparse_batch_data_in_batches(cast(SparseBatchData, data), component)
         else:
-            component_batches = split_dense_batch_data_in_batches(data, component)
+            component_batches = split_dense_batch_data_in_batches(cast(SingleComponentData, data), component)
         for i, batch in enumerate(component_batches):
             if (isinstance(batch, dict) and batch) or (isinstance(batch, np.ndarray) and batch.size > 0):
                 list_data[i][component] = batch
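The `cast(...)` calls added throughout this file are purely for the type checker: `typing.cast` performs no runtime conversion or validation, so behaviour is unchanged. They are needed because helper predicates such as `is_sparse()` and `is_columnar()` are opaque to mypy, which therefore cannot narrow the union types on its own. A generic illustration of the pattern (names hypothetical):

```python
from typing import cast


def is_columnar_like(data: dict[str, int] | list[int]) -> bool:
    # mypy does not know this predicate narrows the type of `data`.
    return isinstance(data, dict)


def head(data: dict[str, int] | list[int]) -> int:
    if is_columnar_like(data):
        return next(iter(cast(dict[str, int], data).values()))
    return cast(list[int], data)[0]  # no-op at runtime, informs mypy only
```

(`typing.TypeGuard` would be an alternative way to teach mypy about such predicates.)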
@@ -151,7 +151,9 @@ def get_batch_size(batch_data: BatchComponentData) -> int:
         return indptr.size - 1

     data_to_check = (
-        next(iter(cast(DenseBatchColumnarData, batch_data).values())) if is_columnar(batch_data) else batch_data
+        next(iter(cast(DenseBatchColumnarData, batch_data).values()))
+        if is_columnar(batch_data)
+        else cast(DenseBatchArray, batch_data)
     )
     if data_to_check.ndim == 1:
         return 1
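Splitting the ternary across lines is likely a line-length side effect of adding the second `cast`; the logic is unchanged. For reference, a self-contained sketch of what `get_batch_size` does here, under the assumption (consistent with the visible lines) that the batch size of dense data is 1 for 1-D data and the leading dimension otherwise:

```python
import numpy as np


def sketch_batch_size(batch: np.ndarray | dict[str, np.ndarray]) -> int:
    # Columnar data is a dict of attribute arrays; every attribute shares the
    # same leading dimension, so any one of them can be inspected.
    # Row-based data is a single structured array.
    arr = next(iter(batch.values())) if isinstance(batch, dict) else batch
    return 1 if arr.ndim == 1 else arr.shape[0]
```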
@@ -397,7 +399,7 @@ def _convert_data_to_row_or_columnar(
     if isinstance(attrs, (list, set)) and len(attrs) == 0:
         return {}
     if isinstance(attrs, ComponentAttributeFilterOptions):
-        names = cast(np.ndarray, data).dtype.names if not is_columnar(data) else data.keys()
+        names = cast(SingleArray, data).dtype.names if not is_columnar(data) else cast(SingleColumnarData, data).keys()
         return {attr: deepcopy(data[attr]) for attr in names}
     return {attr: deepcopy(data[attr]) for attr in attrs}

@@ -575,7 +577,7 @@ def _extract_columnar_data(data: ComponentData, is_batch: bool | None = None) -> ColumnarData:
             raise TypeError(not_columnar_data_message)
         if attribute_array.ndim not in allowed_dims:
             raise TypeError(not_columnar_data_message)
-    return sub_data
+    return cast(ColumnarData, sub_data)


 def _extract_row_based_data(data: ComponentData, is_batch: bool | None = None) -> DataArray:  # pragma: no cover
13 changes: 7 additions & 6 deletions src/power_grid_model/core/buffer_handling.py
@@ -20,6 +20,7 @@
     AttributeType,
     ComponentData,
     DenseBatchData,
+    IndexPointer,
     SingleComponentData,
     SparseBatchArray,
     SparseBatchData,
@@ -153,7 +154,7 @@ def _get_uniform_buffer_properties(

     if isinstance(data, np.ndarray):
         ndim = data.ndim
-        shape: tuple[int] = data.shape
+        shape = data.shape
         columns = None
     elif data:
         attribute, attribute_data = next(iter(data.items()))
@@ -317,7 +318,7 @@ def _get_attribute_buffer_views(


 def _get_uniform_buffer_view(
-    data: np.ndarray,
+    data: DenseBatchData,
     schema: ComponentMetaData,
     is_batch: bool | None,
     batch_size: int | None,
@@ -396,7 +397,7 @@ def get_buffer_view(
         the C API buffer view.
     """
     if not is_sparse(data):
-        return _get_uniform_buffer_view(data, schema, is_batch, batch_size)
+        return _get_uniform_buffer_view(cast(DenseBatchData, data), schema, is_batch, batch_size)

     if is_batch is not None and not is_batch:
         raise ValueError("Sparse data must be batch data")
@@ -463,13 +464,13 @@ def _create_sparse_buffer(properties: BufferProperties, schema: ComponentMetaData
     Returns:
         A sparse buffer with the correct properties.
     """
-    data = _create_contents_buffer(
+    data: SingleComponentData = _create_contents_buffer(
         shape=properties.n_total_elements,
         dtype=schema.dtype,
         columns=properties.columns,
     )
-    indptr = np.array([0] * properties.batch_size + [properties.n_total_elements], dtype=IdxC)
-    return {"data": data, "indptr": indptr}
+    indptr: IndexPointer = np.array([0] * properties.batch_size + [properties.n_total_elements], dtype=IdxC)
+    return cast(SparseBatchData, {"data": data, "indptr": indptr})


 def _create_contents_buffer(shape, dtype, columns: list[AttributeType] | None) -> SingleComponentData | DenseBatchData:
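The explicit `IndexPointer` and `SparseBatchData` annotations document the sparse-batch layout this buffer uses: one flat contents buffer shared by all scenarios, plus an index pointer where `indptr[i]:indptr[i + 1]` delimits scenario `i` (the same convention `_get_sparse_scenario` in `utils.py` slices by). A small illustration with made-up numbers:

```python
import numpy as np

indptr = np.array([0, 2, 2, 5])  # 3 scenarios with sizes 2, 0 and 3
data = np.arange(5)

scenario_0 = data[indptr[0] : indptr[1]]  # array([0, 1])
scenario_1 = data[indptr[1] : indptr[2]]  # empty slice
scenario_2 = data[indptr[2] : indptr[3]]  # array([2, 3, 4])
```

The freshly created buffer starts as `[0, ..., 0, n_total]`, i.e. everything provisionally assigned to the last scenario, presumably to be overwritten once the real offsets are known.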
2 changes: 1 addition & 1 deletion src/power_grid_model/core/power_grid_dataset.py
@@ -513,6 +513,6 @@ def _get_filtered_attributes(
         return None

     if isinstance(component_data_filter, ComponentAttributeFilterOptions):
-        return list(schema.dtype.names)
+        return [] if schema.dtype.names is None else list(schema.dtype.names)

     return list(component_data_filter)
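This guards against the case mypy flagged: `np.dtype.names` is `Optional[tuple[str, ...]]`, returning `None` for plain (unstructured) dtypes. A quick demonstration (attribute names hypothetical):

```python
import numpy as np

assert np.dtype("f8").names is None  # plain dtype: no fields
assert np.dtype([("id", "i4"), ("u_rated", "f8")]).names == ("id", "u_rated")
```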
3 changes: 2 additions & 1 deletion src/power_grid_model/core/power_grid_meta.py
@@ -21,6 +21,7 @@
     _str_to_datatype,
 )
 from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc
+from power_grid_model.data_types import DenseBatchArray, SingleArray


 # constant enum for ctype
@@ -171,7 +172,7 @@ def initialize_array(
     component_type: ComponentTypeLike,
     shape: tuple | int,
     empty: bool = False,
-) -> np.ndarray:
+) -> SingleArray | DenseBatchArray:
     """
     Initializes an array for use in Power Grid Model calculations

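The narrowed return type reflects the existing behaviour: presumably a scalar or 1-tuple `shape` yields a single structured array, and a 2-tuple yields a dense batch. A hedged usage sketch — the hunk hides the parameters above `component_type`, so the leading `data_type` argument below is assumed from the project's documented API rather than shown in this diff:

```python
from power_grid_model import initialize_array

node = initialize_array("input", "node", 5)  # SingleArray, shape (5,)
node_batch = initialize_array("update", "node", (10, 5))  # DenseBatchArray, shape (10, 5)
```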
8 changes: 5 additions & 3 deletions src/power_grid_model/data_types.py
@@ -167,38 +167,40 @@ class SparseBatchColumnarData(TypedDict):
 Columnar data can be :class:`SingleColumnarData` or :class:`BatchColumnarData`.
 """

+_SingleComponentData = TypeVar("_SingleComponentData", SingleArray, SingleColumnarData)  # deduction helper
 SingleComponentData = SingleArray | SingleColumnarData
 """
 Single component data can be :class:`SingleArray` or :class:`SingleColumnarData`.
 """

+_BatchComponentData = TypeVar("_BatchComponentData", BatchArray, BatchColumnarData)  # deduction helper
 BatchComponentData = BatchArray | BatchColumnarData
 """
 Batch component data can be :class:`BatchArray` or :class:`BatchColumnarData`.
 """

-_ComponentData = TypeVar("_ComponentData", SingleComponentData, BatchComponentData)  # deduction helper
 ComponentData = DataArray | ColumnarData
 """
 Component data can be :class:`DataArray` or :class:`ColumnarData`.
 """

-SingleDataset = dict[ComponentTypeVar, SingleComponentData]
+SingleDataset = dict[ComponentTypeVar, _SingleComponentData]
 """
 A single dataset is a dictionary where the keys are the component types and the values are
 :class:`ComponentData`

 - Example: {"node": :class:`SingleArray`, "line": :class:`SingleColumnarData`}
 """

-BatchDataset = dict[ComponentTypeVar, BatchComponentData]
+BatchDataset = dict[ComponentTypeVar, _BatchComponentData]
 """
 A batch dataset is a dictionary where the keys are the component types and the values are :class:`BatchComponentData`

 - Example: {"node": :class:`DenseBatchArray`, "line": :class:`SparseBatchArray`,
   "link": :class:`DenseBatchColumnarData`, "transformer": :class:`SparseBatchColumnarData`}
 """

+_ComponentData = TypeVar("_ComponentData", SingleComponentData, BatchComponentData)  # deduction helper
 Dataset = dict[ComponentTypeVar, _ComponentData]
 """
 A general data set can be a :class:`SingleDataset` or a :class:`BatchDataset`.
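Rebasing the dataset aliases onto restricted `TypeVar`s is what the "deduction helper" comments refer to: `dict[K, _T]` with a constrained `_T` is a *generic* alias rather than a plain union, so mypy can deduce that a given dataset is homogeneously single or homogeneously batch, and a signature can tie its return type to its argument type. A generic illustration of the pattern (names hypothetical):

```python
from typing import TypeVar

_Data = TypeVar("_Data", int, list[int])  # stand-ins for single vs. batch
Dataset = dict[str, _Data]  # a generic alias, not a plain union


def roundtrip(dataset: Dataset[_Data]) -> Dataset[_Data]:
    # mypy deduces that a single dataset maps back to a single dataset and
    # a batch dataset to a batch dataset, instead of a union of both.
    return dict(dataset)
```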
6 changes: 4 additions & 2 deletions src/power_grid_model/utils.py
@@ -37,7 +37,9 @@
     BatchComponentData,
     BatchDataset,
     Dataset,
+    DenseBatchArray,
     IndexPointer,
+    SingleComponentData,
     SingleDataset,
 )
 from power_grid_model.errors import PowerGridError, PowerGridSerializationError
@@ -69,7 +71,7 @@ def _get_dense_scenario(arr: np.ndarray) -> np.ndarray:
     def _get_sparse_scenario(arr: np.ndarray, indptr: IndexPointer) -> np.ndarray:
         return arr[indptr[scenario] : indptr[scenario + 1]]

-    def _get_component_scenario(component_scenarios: BatchComponentData) -> np.ndarray:
+    def _get_component_scenario(component_scenarios: BatchComponentData) -> SingleComponentData:
         data = _extract_data_from_component_data(component_scenarios)

         if is_sparse(component_scenarios):
@@ -83,7 +85,7 @@ def _get_component_scenario(component_scenarios: BatchComponentData) -> np.ndarray:

         if is_columnar(component_scenarios):
             return {attribute: _get_dense_scenario(attribute_data) for attribute, attribute_data in data.items()}
-        return _get_dense_scenario(component_scenarios)
+        return _get_dense_scenario(cast_type(DenseBatchArray, component_scenarios))

     return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()}

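Taken together, these changes make the per-scenario extraction fully typed: sparse components slice a flat array by `indptr`, dense row-based components index the scenario axis directly, and columnar components do the same per attribute. A small worked example of the two dense/sparse paths (values hypothetical; `_get_dense_scenario` is assumed to be plain indexing, consistent with its signature above):

```python
import numpy as np

scenario = 1

# Sparse: scenario i owns data[indptr[i]:indptr[i + 1]].
indptr = np.array([0, 2, 5])
flat = np.arange(5)
sparse_scenario = flat[indptr[scenario] : indptr[scenario + 1]]  # array([2, 3, 4])

# Dense: scenario i is row i of a (batch_size, n_elements) array.
dense = np.arange(6).reshape(3, 2)
dense_scenario = dense[scenario]  # array([2, 3])
```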