chore: update Numpy and the matching syntax (#285)
* chore: update numpy in .pre-commit-config.yaml

* fix: update syntax matching Numpy 1.21 for mypy

* chore: update Numpy to 1.14.5 in setup.cfg

* fix: change np.diag to np.diagonal
amangoel185 authored Aug 10, 2021
1 parent d400c83 commit 967d0fe
Showing 6 changed files with 58 additions and 45 deletions.
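
The bulk of the diff below swaps the bare `np.ndarray` annotation for `np.typing.NDArray[Any]`, the generic alias that NumPy 1.21 added for static typing. A minimal before/after sketch of the style (illustrative only, not taken from this commit):

from __future__ import annotations

from typing import Any

import numpy as np


def scale_old(values: np.ndarray, factor: float) -> np.ndarray:
    # pre-1.21 style: bare ndarray, with no dtype parameter for mypy to check
    return values * factor


def scale_new(
    values: np.typing.NDArray[Any], factor: float
) -> np.typing.NDArray[Any]:
    # np.typing.NDArray is a generic alias introduced in NumPy 1.21
    result: np.typing.NDArray[Any] = values * factor
    return result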
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -57,7 +57,7 @@ repos:
       - id: mypy
         files: ^src
         args: []
-        additional_dependencies: ["numpy==1.20.*", "matplotlib>=3.3", "boost-histogram~=1.0.1", "uhi~=0.3.0"]
+        additional_dependencies: ["numpy==1.21.*", "matplotlib>=3.3", "boost-histogram~=1.0.1", "uhi~=0.3.0"]

   - repo: https://github.com/mgedmin/check-manifest
     rev: "0.46"
2 changes: 1 addition & 1 deletion setup.cfg
@@ -43,7 +43,7 @@ packages = find:
 install_requires =
     boost-histogram~=1.1.0
     histoprint>=1.6
-    numpy>=1.13.3
+    numpy>=1.14.5
     typing_extensions;python_version<"3.8"
 python_requires = >=3.7
 include_package_data = True
12 changes: 7 additions & 5 deletions src/hist/basehist.py
@@ -63,7 +63,7 @@ def __init__(
         *args: AxisProtocol | Storage | str | tuple[int, float, float],
         storage: Storage | str | None = None,
         metadata: Any = None,
-        data: np.ndarray | None = None,
+        data: np.typing.NDArray[Any] | None = None,
     ) -> None:
         """
         Initialize BaseHist object. Axis params can contain the names.
@@ -355,12 +355,12 @@ def profile(self: T, axis: int | str) -> T:
         retval[...] = np.stack([count, new_values, count * new_variances], axis=-1)
         return retval

-    def density(self) -> np.ndarray:
+    def density(self) -> np.typing.NDArray[Any]:
         """
         Density NumPy array.
         """
         total = np.sum(self.values()) * functools.reduce(operator.mul, self.axes.widths)
-        dens: np.ndarray = self.values() / np.where(total > 0, total, 1)
+        dens: np.typing.NDArray[Any] = self.values() / np.where(total > 0, total, 1)
         return dens

     def show(self, **kwargs: Any) -> Any:
@@ -446,7 +446,9 @@ def plot2d_full(

     def plot_ratio(
         self,
-        other: hist.BaseHist | Callable[[np.ndarray], np.ndarray] | str,
+        other: hist.BaseHist
+        | Callable[[np.typing.NDArray[Any]], np.typing.NDArray[Any]]
+        | str,
         *,
         ax_dict: dict[str, matplotlib.axes.Axes] | None = None,
         **kwargs: Any,
@@ -466,7 +468,7 @@ def plot_ratio(

     def plot_pull(
         self,
-        func: Callable[[np.ndarray], np.ndarray] | str,
+        func: Callable[[np.typing.NDArray[Any]], np.typing.NDArray[Any]] | str,
         *,
         ax_dict: dict[str, matplotlib.axes.Axes] | None = None,
         **kwargs: Any,
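
As a usage note for the retyped density method above: it divides the bin counts by the total count times the product of the bin widths. A small sketch (assumes the hist quick-construct API is available; illustrative, not part of this commit):

import numpy as np

import hist

h = hist.Hist.new.Reg(4, 0.0, 4.0, name="x").Double()
h.fill([0.5, 1.5, 1.5, 2.5])

dens = h.density()  # now annotated as np.typing.NDArray[Any]
# density times bin width sums to ~1.0 for a filled 1D histogram
print(np.sum(dens * h.axes[0].widths))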
20 changes: 11 additions & 9 deletions src/hist/intervals.py
@@ -25,10 +25,10 @@ def __dir__() -> tuple[str, ...]:


 def poisson_interval(
-    values: np.ndarray,
-    variances: np.ndarray | None = None,
+    values: np.typing.NDArray[Any],
+    variances: np.typing.NDArray[Any] | None = None,
     coverage: float | None = None,
-) -> np.ndarray:
+) -> np.typing.NDArray[Any]:
     r"""
     The Frequentist coverage interval for Poisson-distributed observations.
@@ -67,7 +67,7 @@ def poisson_interval(
     scale = np.ones_like(values)
     mask = np.isfinite(values) & (values != 0)
     np.divide(variances, values, out=scale, where=mask)
-    counts = values / scale
+    counts: np.typing.NDArray[Any] = values / scale
     interval_min = scale * stats.chi2.ppf((1 - coverage) / 2, 2 * counts) / 2.0
     interval_min[values == 0.0] = 0.0  # chi2.ppf produces NaN for values=0
     interval_max = (
@@ -79,8 +79,10 @@


 def clopper_pearson_interval(
-    num: np.ndarray, denom: np.ndarray, coverage: float | None = None
-) -> np.ndarray:
+    num: np.typing.NDArray[Any],
+    denom: np.typing.NDArray[Any],
+    coverage: float | None = None,
+) -> np.typing.NDArray[Any]:
     r"""
     Compute the Clopper-Pearson coverage interval for a binomial distribution.
     c.f. http://en.wikipedia.org/wiki/Binomial_proportion_confidence_interval
@@ -112,8 +114,8 @@


 def ratio_uncertainty(
-    num: np.ndarray,
-    denom: np.ndarray,
+    num: np.typing.NDArray[Any],
+    denom: np.typing.NDArray[Any],
     uncertainty_type: Literal["poisson", "poisson-ratio", "efficiency"] = "poisson",
 ) -> Any:
     r"""
@@ -155,7 +157,7 @@ def ratio_uncertainty(
         # Details: see https://github.com/scikit-hep/hist/issues/279
         p_lim = clopper_pearson_interval(num, num + denom)
         with np.errstate(divide="ignore", invalid="ignore"):
-            r_lim = p_lim / (1 - p_lim)
+            r_lim: np.typing.NDArray[Any] = p_lim / (1 - p_lim)
         ratio_uncert = np.abs(r_lim - ratio)
     elif uncertainty_type == "efficiency":
         ratio_uncert = np.abs(clopper_pearson_interval(num, denom) - ratio)
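
For reference, a short usage sketch of the retyped interval helpers (assumes hist is installed together with scipy, which hist.intervals needs; illustrative, not part of this commit):

import numpy as np

from hist import intervals

counts = np.array([10.0, 25.0, 0.0])
pois_lo_hi = intervals.poisson_interval(counts)  # default coverage is the one-sigma interval

passed = np.array([3.0, 8.0])
total = np.array([10.0, 10.0])
eff_uncert = intervals.ratio_uncertainty(passed, total, uncertainty_type="efficiency")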
53 changes: 31 additions & 22 deletions src/hist/plot.py
@@ -125,11 +125,11 @@ def _expr_to_lambda(expr: str) -> Callable[..., Any]:

 def _curve_fit_wrapper(
     func: Callable[..., Any],
-    xdata: np.ndarray,
-    ydata: np.ndarray,
-    yerr: np.ndarray,
+    xdata: np.typing.NDArray[Any],
+    ydata: np.typing.NDArray[Any],
+    yerr: np.typing.NDArray[Any],
     likelihood: bool = False,
-) -> tuple[tuple[float, ...], np.ndarray]:
+) -> tuple[tuple[float, ...], np.typing.NDArray[Any]]:
     """
     Wrapper around `scipy.optimize.curve_fit`. Initial parameters (`p0`)
     can be set in the function definition with defaults for kwargs
@@ -156,7 +156,7 @@ def _curve_fit_wrapper(
         from iminuit import Minuit
         from scipy.special import gammaln

-        def fnll(v: Iterable[np.ndarray]) -> float:
+        def fnll(v: Iterable[np.typing.NDArray[Any]]) -> float:
             ypred = func(xdata, *v)
             if (ypred <= 0.0).any():
                 return 1e6
@@ -261,7 +261,7 @@ def plot2d_full(

 def _construct_gaussian_callable(
     __hist: hist.BaseHist,
-) -> Callable[[np.ndarray], np.ndarray]:
+) -> Callable[[np.typing.NDArray[Any]], np.typing.NDArray[Any]]:
     x_values = __hist.axes[0].centers
     hist_values = __hist.values()

@@ -272,13 +272,13 @@

     # gauss is a closure that will get evaluated in _fit_callable_to_hist
     def gauss(
-        x: np.ndarray,
+        x: np.typing.NDArray[Any],
         constant: float = constant,
         mean: float = mean,
         sigma: float = sigma,
-    ) -> np.ndarray:
-        # Note: Force np.ndarray type as numpy ufuncs have type "Any"
-        ret: np.ndarray = constant * np.exp(
+    ) -> np.typing.NDArray[Any]:
+        # Note: Force np.typing.NDArray[Any] type as numpy ufuncs have type "Any"
+        ret: np.typing.NDArray[Any] = constant * np.exp(
             -np.square(x - mean) / (2 * np.square(sigma))
         )
         return ret
@@ -287,10 +287,15 @@ def gauss(


 def _fit_callable_to_hist(
-    model: Callable[[np.ndarray], np.ndarray],
+    model: Callable[[np.typing.NDArray[Any]], np.typing.NDArray[Any]],
     histogram: hist.BaseHist,
     likelihood: bool = False,
-) -> tuple[np.ndarray, np.ndarray, np.ndarray, tuple[tuple[float, ...], np.ndarray]]:
+) -> tuple[
+    np.typing.NDArray[Any],
+    np.typing.NDArray[Any],
+    np.typing.NDArray[Any],
+    tuple[tuple[float, ...], np.typing.NDArray[Any]],
+]:
     """
     Fit a model, a callable function, to the histogram values.
     """
@@ -312,7 +317,7 @@ def _fit_callable_to_hist(
         n_samples = 100
         vopts = np.random.multivariate_normal(popt, pcov, n_samples)
         sampled_ydata = np.vstack([model(xdata, *vopt).T for vopt in vopts])
-        model_uncert = np.nanstd(sampled_ydata, axis=0)
+        model_uncert = np.nanstd(sampled_ydata, axis=0)  # type: ignore
     else:
         model_uncert = np.zeros_like(hist_uncert)

@@ -321,8 +326,8 @@

 def _plot_fit_result(
     __hist: hist.BaseHist,
-    model_values: np.ndarray,
-    model_uncert: np.ndarray,
+    model_values: np.typing.NDArray[Any],
+    model_uncert: np.typing.NDArray[Any],
     ax: matplotlib.axes.Axes,
     eb_kwargs: dict[str, Any],
     fp_kwargs: dict[str, Any],
@@ -361,8 +366,8 @@ def _plot_fit_result(

 def plot_ratio_array(
     __hist: hist.BaseHist,
-    ratio: np.ndarray,
-    ratio_uncert: np.ndarray,
+    ratio: np.typing.NDArray[Any],
+    ratio_uncert: np.typing.NDArray[Any],
     ax: matplotlib.axes.Axes,
     **kwargs: Any,
 ) -> RatioArtists:
@@ -435,7 +440,7 @@ def plot_ratio_array(
             valid_ratios + ratio_uncert[1][valid_ratios_idx],
         ]
     )
-    max_delta = np.max(np.abs(extrema - central_value))
+    max_delta = np.amax(np.abs(extrema - central_value))
     ratio_extrema = np.abs(max_delta + central_value)

     _alpha = 2.0
@@ -453,7 +458,7 @@

 def plot_pull_array(
     __hist: hist.BaseHist,
-    pulls: np.ndarray,
+    pulls: np.typing.NDArray[Any],
     ax: matplotlib.axes.Axes,
     bar_kwargs: dict[str, Any],
     pp_kwargs: dict[str, Any],
@@ -502,7 +507,9 @@ def plot_pull_array(

 def _plot_ratiolike(
     self: hist.BaseHist,
-    other: hist.BaseHist | Callable[[np.ndarray], np.ndarray] | str,
+    other: hist.BaseHist
+    | Callable[[np.typing.NDArray[Any]], np.typing.NDArray[Any]]
+    | str,
     likelihood: bool = False,
     *,
     ax_dict: dict[str, matplotlib.axes.Axes] | None = None,
@@ -608,7 +615,7 @@ def _plot_ratiolike(
         if fit_fmt is not None:
             parnames = list(inspect.signature(other).parameters)[1:]
             popt, pcov = bestfit_result
-            perr = np.sqrt(np.diag(pcov))
+            perr = np.sqrt(np.diagonal(pcov))

             fp_label = "Fit"
             for name, value, error in zip(parnames, popt, perr):
@@ -651,7 +658,9 @@ def _plot_ratiolike(
         )

     elif view == "pull":
-        pulls = (hist_values - compare_values) / hist_values_uncert
+        pulls: np.typing.NDArray[Any] = (
+            hist_values - compare_values
+        ) / hist_values_uncert

         pulls[np.isnan(pulls) | np.isinf(pulls)] = 0

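
On the two non-typing substitutions in this commit, np.diag → np.diagonal and np.max → np.amax: both pairs are numerically interchangeable at these call sites (a 2-D covariance matrix and a plain array maximum); the swap presumably just sidesteps overloads in the NumPy 1.21 stubs. A quick check (illustrative only, not from the commit):

import numpy as np

pcov = np.array([[0.04, 0.01], [0.01, 0.09]])  # toy covariance matrix
assert np.array_equal(np.diag(pcov), np.diagonal(pcov))
perr = np.sqrt(np.diagonal(pcov))  # per-parameter errors, as in _plot_ratiolike

values = np.array([1.0, -3.0, 2.0])
assert np.amax(np.abs(values)) == np.max(np.abs(values))  # amax is the array-only alias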
14 changes: 7 additions & 7 deletions src/hist/svgplots.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import Callable
+from typing import Any, Callable

 import numpy as np
 from boost_histogram.axis import Axis
@@ -70,8 +70,8 @@ def svg_hist_1d(h: hist.BaseHist) -> svg:
     (edges,) = h.axes.edges
     norm_edges = (edges - edges[0]) / (edges[-1] - edges[0])
     density = h.density()
-    max_dens = np.max(density) or 1
-    norm_vals = density / max_dens
+    max_dens = np.amax(density) or 1
+    norm_vals: np.typing.NDArray[Any] = density / max_dens

     arr = np.empty((2, len(norm_vals) * 2 + 2), dtype=float)
     arr[0, 0:-1:2] = arr[0, 1::2] = width * norm_edges
@@ -119,8 +119,8 @@ def svg_hist_1d_c(h: hist.BaseHist) -> svg:
     (edges,) = h.axes.edges
     norm_edges = (edges - edges[0]) / (edges[-1] - edges[0]) * np.pi * 2
     density = h.density()
-    max_dens = np.max(density) or 1
-    norm_vals = density / max_dens
+    max_dens = np.amax(density) or 1
+    norm_vals: np.typing.NDArray[Any] = density / max_dens

     arr = np.empty((2, len(norm_vals) * 2), dtype=float)
     arr[0, :-1:2] = arr[0, 1::2] = norm_edges[:-1]
@@ -153,8 +153,8 @@ def svg_hist_2d(h: hist.BaseHist) -> svg:
     ey = -(e1 - e1[0]) / (e1[-1] - e1[0]) * height

     density = h.density()
-    max_dens = np.max(density) or 1
-    norm_vals = density / max_dens
+    max_dens = np.amax(density) or 1
+    norm_vals: np.typing.NDArray[Any] = density / max_dens

     boxes = []
     for r, (up_edge, bottom_edge) in enumerate(zip(ey[:-1], ey[1:])):
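
One detail worth noting in the SVG helpers above: np.amax(density) or 1 yields a NumPy scalar that is falsy when the histogram is empty (maximum density 0.0), so the normalization falls back to dividing by 1 instead of producing NaNs. A tiny sketch (illustrative only):

import numpy as np

density = np.zeros(5)             # e.g. an unfilled histogram
max_dens = np.amax(density) or 1  # 0.0 is falsy, so max_dens becomes 1
norm_vals = density / max_dens    # stays all zeros instead of NaN
print(max_dens, norm_vals)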
