Merge branch 'main' into enh-array_to_datetime-inference
jbrockmendel committed May 21, 2024
2 parents 4ff9b11 + 2aa155a commit cd9b735
Showing 140 changed files with 2,200 additions and 2,955 deletions.
@@ -1,8 +1,8 @@
-name: Purge caches once a week
+name: Purge caches daily
 on:
   schedule:
-    # 4:10 UTC on Sunday
-    - cron: "10 4 * * 0"
+    # 4:10 UTC daily
+    - cron: "10 4 * * *"

 jobs:
   cleanup:
2 changes: 1 addition & 1 deletion .github/workflows/wheels.yml
@@ -140,7 +140,7 @@ jobs:
         run: echo "sdist_name=$(cd ./dist && ls -d */)" >> "$GITHUB_ENV"

       - name: Build wheels
-        uses: pypa/cibuildwheel@v2.17.0
+        uses: pypa/cibuildwheel@v2.18.0
         with:
           package-dir: ./dist/${{ startsWith(matrix.buildplat[1], 'macosx') && env.sdist_name || needs.build_sdist.outputs.sdist_file }}
         env:
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -90,6 +90,7 @@ repos:
     rev: v0.9.1
     hooks:
     -   id: sphinx-lint
+        args: ["--enable", "all", "--disable", "line-too-long"]
 -   repo: https://github.com/pre-commit/mirrors-clang-format
     rev: v18.1.4
     hooks:
191 changes: 0 additions & 191 deletions LICENSES/XARRAY_LICENSE

This file was deleted.

10 changes: 0 additions & 10 deletions asv_bench/benchmarks/io/csv.py
@@ -445,16 +445,6 @@ def setup(self, engine):
         data = data.format(*two_cols)
         self.StringIO_input = StringIO(data)

-    def time_multiple_date(self, engine):
-        read_csv(
-            self.data(self.StringIO_input),
-            engine=engine,
-            sep=",",
-            header=None,
-            names=list(string.digits[:9]),
-            parse_dates=[[1, 2], [1, 3]],
-        )
-
     def time_baseline(self, engine):
         read_csv(
             self.data(self.StringIO_input),
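Note: the deleted time_multiple_date benchmark exercised the nested-list form of parse_dates (parse_dates=[[1, 2], [1, 3]]), which combined several columns into a single datetime while parsing and is no longer supported by read_csv on main. A minimal sketch of the replacement pattern, using hypothetical column names and data, is to read the columns as-is and combine them afterwards with to_datetime:

import pandas as pd
from io import StringIO

# Hypothetical CSV with separate date and time columns.
data = StringIO("KORD,1999-01-27,19:00:00\nKORD,1999-01-27,20:00:00\n")
df = pd.read_csv(data, header=None, names=["station", "date", "time"])

# Combine the columns after reading instead of using parse_dates=[[1, 2]].
df["datetime"] = pd.to_datetime(df["date"] + " " + df["time"])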
25 changes: 1 addition & 24 deletions asv_bench/benchmarks/io/parsers.py
@@ -1,10 +1,5 @@
-import numpy as np
-
 try:
-    from pandas._libs.tslibs.parsing import (
-        _does_string_look_like_datetime,
-        concat_date_cols,
-    )
+    from pandas._libs.tslibs.parsing import _does_string_look_like_datetime
 except ImportError:
     # Avoid whole benchmark suite import failure on asv (currently 0.4)
     pass
@@ -20,21 +15,3 @@ def setup(self, value):
     def time_check_datetimes(self, value):
         for obj in self.objects:
             _does_string_look_like_datetime(obj)
-
-
-class ConcatDateCols:
-    params = ([1234567890, "AAAA"], [1, 2])
-    param_names = ["value", "dim"]
-
-    def setup(self, value, dim):
-        count_elem = 10000
-        if dim == 1:
-            self.object = (np.array([value] * count_elem),)
-        if dim == 2:
-            self.object = (
-                np.array([value] * count_elem),
-                np.array([value] * count_elem),
-            )
-
-    def time_check_concat(self, value, dim):
-        concat_date_cols(self.object)
16 changes: 10 additions & 6 deletions asv_bench/benchmarks/series_methods.py
@@ -148,10 +148,14 @@ def time_searchsorted(self, dtype):


 class Map:
-    params = (["dict", "Series", "lambda"], ["object", "category", "int"])
-    param_names = "mapper"
+    params = (
+        ["dict", "Series", "lambda"],
+        ["object", "category", "int"],
+        [None, "ignore"],
+    )
+    param_names = ["mapper", "dtype", "na_action"]

-    def setup(self, mapper, dtype):
+    def setup(self, mapper, dtype, na_action):
         map_size = 1000
         map_data = Series(map_size - np.arange(map_size), dtype=dtype)

@@ -168,8 +172,8 @@ def setup(self, mapper, dtype):

         self.s = Series(np.random.randint(0, map_size, 10000), dtype=dtype)

-    def time_map(self, mapper, *args, **kwargs):
-        self.s.map(self.map_data)
+    def time_map(self, mapper, dtype, na_action):
+        self.s.map(self.map_data, na_action=na_action)


 class Clip:
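Note: the new na_action axis in the Map benchmark covers Series.map's handling of missing values: with the default na_action=None the mapper is also called on NA values, while na_action="ignore" propagates them without invoking the mapper. A small illustrative sketch (values chosen only for demonstration):

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 3.0])

# Default na_action=None: the mapper also receives NaN.
print(s.map(lambda x: "missing" if pd.isna(x) else x * 10))
# -> 10.0, "missing", 30.0

# na_action="ignore": NaN is propagated and the mapper is skipped for it.
print(s.map(lambda x: "missing" if pd.isna(x) else x * 10, na_action="ignore"))
# -> 10.0, NaN, 30.0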

0 comments on commit cd9b735
