
Commit fe8e62e

Add cython-lint pre-commit hook (#623)
adityagoel4512 committed Mar 28, 2023
1 parent da7c85f commit fe8e62e
Showing 3 changed files with 13 additions and 6 deletions.
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
@@ -41,3 +41,9 @@ repos:
     hooks:
       - id: pyupgrade-conda
         exclude: ^src/glum_benchmarks/orig_sklearn_fork/
+  - repo: https://github.com/Quantco/pre-commit-mirrors-cython-lint
+    rev: 0.14.0
+    hooks:
+      - id: cython-lint-conda
+        args: [--no-pycodestyle]
+      - id: double-quote-cython-strings-conda
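
The two hooks added above come from Quantco's conda-based mirror of cython-lint: cython-lint-conda lints the .pyx sources (with pycodestyle-style checks disabled here via --no-pycodestyle), and double-quote-cython-strings-conda normalizes string literals to double quotes. Assuming a standard pre-commit setup (pre-commit install already run), the new hooks can be exercised locally across the repository with:

    pre-commit run cython-lint-conda --all-files
    pre-commit run double-quote-cython-strings-conda --all-files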
6 changes: 4 additions & 2 deletions src/glum/_cd_fast.pyx
@@ -13,7 +13,6 @@ cimport numpy as np
 import numpy as np
 from numpy.math cimport INFINITY
 
-cimport cython
 from cython cimport floating
 from cython.parallel import prange
 import warnings
@@ -55,6 +54,7 @@ cdef inline floating fsign(floating f) nogil:
     else:
         return -1.0
 
+
 def identify_active_rows(
     floating[::1] gradient_rows,
     floating[::1] hessian_rows,
@@ -89,6 +89,7 @@ def identify_active_rows(
 
     return hessian_rows_diff_arr, active_rows_arr
 
+
 def enet_coordinate_descent_gram(int[::1] active_set,
                                  floating[::1] w,
                                  floating[::1] P1,
@@ -111,7 +112,6 @@ def enet_coordinate_descent_gram(int[::1] active_set,
 
     # get the data information into easy vars
     cdef unsigned int n_active_features = active_set.shape[0]
-    cdef unsigned int n_features = Q.shape[0]
 
     cdef floating w_ii
     cdef floating P1_ii
@@ -197,6 +197,7 @@ def enet_coordinate_descent_gram(int[::1] active_set,
 
     return np.asarray(w), norm_min_subgrad, max_min_subgrad, tol, n_iter + 1
 
+
 cdef void cython_norm_min_subgrad(
     int[::1] active_set,
     floating[::1] coef,
@@ -268,6 +269,7 @@ cdef void cython_norm_min_subgrad(
         if absterm > max_out[0]:
            max_out[0] = absterm
 
+
 def _norm_min_subgrad(
     int[::1] active_set,
     floating[::1] coef,
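
The two deletions in this file are typical cython-lint findings: a cimport that is never used and a local declaration (n_features) that is declared but never read. A minimal sketch of the pattern the linter flags — a hypothetical module, not code from this repository:

    # lint_demo.pyx -- hypothetical example of code cython-lint would flag
    cimport cython                         # flagged: imported but unused
    from cython cimport floating

    def scale_inplace(floating[::1] x, floating factor):
        cdef unsigned int n = x.shape[0]   # flagged: assigned but never used
        cdef int i
        for i in range(x.shape[0]):        # reads x.shape[0] directly, so n is dead
            x[i] = x[i] * factor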
7 changes: 3 additions & 4 deletions src/glum/_functions.pyx
@@ -20,6 +20,7 @@ cdef fused const_floating1d:
     const float[:]
     const double[:]
 
+
 # NOTE: Here and below, the factor argument is left for last. That's because it
 # will be a single floating point value being passed from Python. In Python,
 # all floating point values are 64-bit. As a result, if it's the first
@@ -38,7 +39,6 @@ def normal_identity_eta_mu_deviance(
 ):
     cdef int n = cur_eta.shape[0]
     cdef int i
-    cdef floating unit_deviance
     cdef floating deviance = 0.0
     for i in prange(n, nogil=True):
         eta_out[i] = cur_eta[i] + factor * X_dot_d[i]
@@ -49,6 +49,7 @@ def normal_identity_eta_mu_deviance(
         deviance += weights[i] * (y[i] - mu_out[i]) ** 2
     return deviance
 
+
 def normal_identity_rowwise_gradient_hessian(
     const_floating1d y,
     const_floating1d weights,
@@ -65,6 +66,7 @@ def normal_identity_rowwise_gradient_hessian(
         # the default calculation in _distribution.py
         hessian_rows_out[i] = weights[i]
 
+
 def normal_log_likelihood(
     const_floating1d y,
     const_floating1d weights,
@@ -110,7 +112,6 @@ def poisson_log_eta_mu_deviance(
 ):
     cdef int n = cur_eta.shape[0]
     cdef int i
-    cdef floating unit_deviance
     cdef floating deviance = 0.0
     for i in prange(n, nogil=True):
         eta_out[i] = cur_eta[i] + factor * X_dot_d[i]
@@ -180,7 +181,6 @@ def gamma_log_eta_mu_deviance(
 ):
     cdef int n = cur_eta.shape[0]
     cdef int i
-    cdef floating unit_deviance
     cdef floating deviance = 0.0
     for i in prange(n, nogil=True):
         eta_out[i] = cur_eta[i] + factor * X_dot_d[i]
@@ -252,7 +252,6 @@ def tweedie_log_eta_mu_deviance(
 ):
     cdef int n = cur_eta.shape[0]
     cdef int i
-    cdef floating unit_deviance
     cdef floating deviance = 0.0
     cdef floating mu1mp
     for i in prange(n, nogil=True):
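
The NOTE kept as context in the first hunk of this file documents why factor is the last argument in these signatures: a scalar passed from Python is always a 64-bit float, so if it appeared before the memoryviews it could pull the fused floating specialization to double even when the arrays are float32. A minimal sketch of the convention, assuming standard Cython fused-type dispatch — illustrative only, not code from this commit:

    # ordering_demo.pyx -- illustrative sketch of the factor-goes-last convention
    from cython cimport floating
    from cython.parallel import prange

    def weighted_sq_error(floating[::1] y, floating[::1] mu,
                          floating[::1] weights, floating factor):
        # The memoryviews come first, so their dtype (float32 or float64)
        # selects the fused specialization; the Python float factor is then
        # coerced to match instead of forcing the double variant.
        cdef int i
        cdef floating total = 0.0
        for i in prange(y.shape[0], nogil=True):
            total += factor * weights[i] * (y[i] - mu[i]) ** 2
        return total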
