Skip to content

Commit

Permalink
Manually install cri-dockerd before installing kubernetes (#3166)
Browse files Browse the repository at this point in the history
  • Loading branch information
wjsi committed Jun 24, 2022
1 parent bde1beb commit cba1b43
Show file tree
Hide file tree
Showing 12 changed files with 80 additions and 171 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/core-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ jobs:
shell: bash
run: |
source ./ci/reload-env.sh
source ./.github/workflows/run-tests.sh
source ./ci/run-tests.sh
coverage xml
- name: Report coverage data
Expand Down
26 changes: 0 additions & 26 deletions .github/workflows/download-etcd.sh

This file was deleted.

28 changes: 0 additions & 28 deletions .github/workflows/install-minikube.sh

This file was deleted.

4 changes: 2 additions & 2 deletions .github/workflows/platform-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -75,11 +75,11 @@ jobs:
else
pip install virtualenv flaky
if [ -n "$WITH_KUBERNETES" ]; then
./.github/workflows/install-minikube.sh
./ci/install-minikube.sh
pip install kubernetes
fi
if [ -n "$WITH_HADOOP" ]; then
./.github/workflows/install-hadoop.sh
./ci/install-hadoop.sh
echo "import coverage; coverage.process_startup()" > \
$(python -c "import site; print(site.getsitepackages()[-1])")/coverage.pth
conda install -n test --quiet --yes -c conda-forge python=$PYTHON skein conda-pack
Expand Down
File renamed without changes.
45 changes: 45 additions & 0 deletions ci/install-minikube.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
#!/bin/bash
# CI helper: provision a single-node Kubernetes cluster on the runner host
# using minikube's "none" driver. Installs, in order: cri-dockerd (the Docker
# CRI shim), crictl, minikube, and a kubectl matching the cluster version,
# then waits until the node is Ready.
set -e
# Let minikube's "none" driver chown its config files to the invoking user.
export CHANGE_MINIKUBE_NONE_USER=true

# Best-effort package index refresh; conntrack is needed by kubeadm's
# preflight checks, jq is used below to parse GitHub release JSON.
sudo apt-get -q update || true
sudo apt-get install -yq conntrack jq

# --- cri-dockerd ------------------------------------------------------------
# Kubernetes 1.24+ dropped dockershim, so the Docker runtime requires the
# external cri-dockerd shim. Resolve the latest release tag (strip the "v"
# prefix for the tarball name) and install the binary to /usr/local/bin.
CRI_DOCKERD_VERSION=$(curl -s https://api.github.com/repos/Mirantis/cri-dockerd/releases/latest | jq -r .tag_name | sed 's/v//g')
curl -Lo /tmp/cri-dockerd.tgz "https://github.com/Mirantis/cri-dockerd/releases/download/v$CRI_DOCKERD_VERSION/cri-dockerd-$CRI_DOCKERD_VERSION.amd64.tgz"
tar xzf /tmp/cri-dockerd.tgz -C /tmp
sudo mv /tmp/cri-dockerd/cri-dockerd /usr/local/bin/
cri-dockerd --version

# Fetch the upstream systemd units, rewrite the unit's hard-coded binary path
# (/usr/bin) to the /usr/local/bin location used above, then enable the
# service and start the socket.
sudo curl -Lo /etc/systemd/system/cri-docker.service https://raw.githubusercontent.com/Mirantis/cri-dockerd/master/packaging/systemd/cri-docker.service
sudo curl -Lo /etc/systemd/system/cri-docker.socket https://raw.githubusercontent.com/Mirantis/cri-dockerd/master/packaging/systemd/cri-docker.socket
sudo sed -i -e 's,/usr/bin/cri-dockerd,/usr/local/bin/cri-dockerd,' /etc/systemd/system/cri-docker.service
sudo systemctl daemon-reload
sudo systemctl enable cri-docker.service
sudo systemctl enable --now cri-docker.socket

# --- crictl -----------------------------------------------------------------
# CLI for CRI runtimes; expected by kubeadm's preflight checks.
CRICTL_VERSION=$(curl -s https://api.github.com/repos/kubernetes-sigs/cri-tools/releases/latest | jq -r .tag_name)
curl -Lo /tmp/crictl.tar.gz "https://github.com/kubernetes-sigs/cri-tools/releases/download/$CRICTL_VERSION/crictl-$CRICTL_VERSION-linux-amd64.tar.gz"
sudo tar xzf /tmp/crictl.tar.gz -C /usr/local/bin

# --- minikube ---------------------------------------------------------------
curl -Lo minikube https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 && \
chmod +x minikube && sudo mv minikube /usr/local/bin/

# The "none" driver runs Kubernetes directly on the host and requires root.
sudo minikube start --vm-driver=none
export KUBECONFIG=$HOME/.kube/config
# minikube ran as root, so copy its kubeconfig and state into the CI user's
# home and take ownership so kubectl works without sudo afterwards.
sudo cp -R /root/.kube /root/.minikube $HOME/
sudo chown -R $(id -u):$(id -g) $HOME/.kube $HOME/.minikube

# Rewrite the copied kubeconfig's /root paths to the user's home directory
# so the certificate file references resolve. NOTE(review): this replaces
# every occurrence of the string "root" in the file — assumed safe for the
# kubeconfig minikube generates.
sed "s/root/home\/$USER/g" $KUBECONFIG > tmp
mv tmp $KUBECONFIG

minikube update-context

# --- kubectl ----------------------------------------------------------------
# Install a standalone kubectl at the same version as minikube's bundled one.
K8S_VERSION=$(minikube kubectl -- version --client --output='json' | jq -r '.clientVersion.gitVersion')
curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/$K8S_VERSION/bin/linux/amd64/kubectl && \
chmod +x kubectl && sudo mv kubectl /usr/local/bin/

# Poll the API server until the node reports condition Ready=True.
JSONPATH='{range .items[*]}{@.metadata.name}:{range @.status.conditions[*]}{@.type}={@.status};{end}{end}'
until kubectl get nodes -o jsonpath="$JSONPATH" 2>&1 | grep -q "Ready=True"; do
sleep 1
done
File renamed without changes.
4 changes: 2 additions & 2 deletions .github/workflows/run-tests.sh → ci/run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@ if [ -n "$WITH_CYTHON" ]; then
mars/tests \
mars/core/graph \
mars/serialization
python .github/workflows/remove_tracer_errors.py
python ci/remove_tracer_errors.py
coverage combine
mv .coverage build/.coverage.non-oscar.file

coverage run --rcfile=setup.cfg -m pytest $PYTEST_CONFIG_WITHOUT_COV mars/oscar
python .github/workflows/remove_tracer_errors.py
python ci/remove_tracer_errors.py
coverage combine
mv .coverage build/.coverage.oscar_ctx.file

Expand Down
43 changes: 9 additions & 34 deletions mars/dataframe/merge/append.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,40 +31,15 @@
class DataFrameAppend(DataFrameOperand, DataFrameOperandMixin):
_op_type_ = OperandDef.APPEND

_ignore_index = BoolField("ignore_index")
_verify_integrity = BoolField("verify_integrity")
_sort = BoolField("sort")

def __init__(
self,
ignore_index=None,
verify_integrity=None,
sort=None,
output_types=None,
**kw,
):
super().__init__(
_ignore_index=ignore_index,
_verify_integrity=verify_integrity,
_sort=sort,
_output_types=output_types,
**kw,
)

@property
def ignore_index(self):
return self._ignore_index

@property
def verify_integrity(self):
return self._verify_integrity
ignore_index = BoolField("ignore_index")
verify_integrity = BoolField("verify_integrity")
sort = BoolField("sort")

@property
def sort(self):
return self._sort
def __init__(self, output_types=None, **kw):
super().__init__(_output_types=output_types, **kw)

@classmethod
def _tile_dataframe(cls, op):
def _tile_dataframe(cls, op: "DataFrameAppend"):
out_df = op.outputs[0]
inputs = op.inputs
first_df, others = inputs[0], inputs[1:]
Expand Down Expand Up @@ -110,7 +85,7 @@ def _tile_dataframe(cls, op):
)

@classmethod
def _tile_series(cls, op):
def _tile_series(cls, op: "DataFrameAppend"):
out_series = op.outputs[0]
inputs = op.inputs
first_series, others = inputs[0], inputs[1:]
Expand Down Expand Up @@ -151,7 +126,7 @@ def _tile_series(cls, op):
)

@classmethod
def tile(cls, op):
def tile(cls, op: "DataFrameAppend"):
if op.output_types[0] == OutputType.dataframe:
return (yield from cls._tile_dataframe(op))
else:
Expand Down Expand Up @@ -222,7 +197,7 @@ def _call_series(self, df, other):
)

@classmethod
def execute(cls, ctx, op):
def execute(cls, ctx, op: "DataFrameAppend"):
first, others = ctx[op.inputs[0].key], [ctx[inp.key] for inp in op.inputs[1:]]
r = first.append(others, verify_integrity=op.verify_integrity, sort=op.sort)
ctx[op.outputs[0].key] = r
Expand Down
91 changes: 18 additions & 73 deletions mars/dataframe/merge/concat.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,81 +40,26 @@
class DataFrameConcat(DataFrameOperand, DataFrameOperandMixin):
_op_type_ = OperandDef.CONCATENATE

_axis = AnyField("axis")
_join = StringField("join")
_ignore_index = BoolField("ignore_index")
_keys = ListField("keys")
_levels = ListField("levels")
_names = ListField("names")
_verify_integrity = BoolField("verify_integrity")
_sort = BoolField("sort")
_copy = BoolField("copy")

def __init__(
self,
axis=None,
join=None,
ignore_index=None,
keys=None,
levels=None,
names=None,
verify_integrity=None,
sort=None,
copy=None,
sparse=None,
output_types=None,
**kw
):
super().__init__(
_axis=axis,
_join=join,
_ignore_index=ignore_index,
_keys=keys,
_levels=levels,
_names=names,
_verify_integrity=verify_integrity,
_sort=sort,
_copy=copy,
_output_types=output_types,
sparse=sparse,
**kw
)

@property
def axis(self):
return self._axis

@property
def join(self):
return self._join

@property
def ignore_index(self):
return self._ignore_index

@property
def keys(self):
return self._keys
axis = AnyField("axis", default=None)
join = StringField("join", default=None)
ignore_index = BoolField("ignore_index", default=None)
keys = ListField("keys", default=None)
levels = ListField("levels", default=None)
names = ListField("names", default=None)
verify_integrity = BoolField("verify_integrity", default=None)
sort = BoolField("sort", default=None)
copy_ = BoolField("copy", default=None)

def __init__(self, copy=None, output_types=None, **kw):
super().__init__(copy_=copy, _output_types=output_types, **kw)

@property
def level(self):
return self._levels
return self.levels

@property
def name(self):
return self._names

@property
def verify_integrity(self):
return self._verify_integrity

@property
def sort(self):
return self._sort

@property
def copy_(self):
return self._copy
return self.names

@classmethod
def _tile_dataframe(cls, op):
Expand Down Expand Up @@ -184,7 +129,7 @@ def _tile_dataframe(cls, op):
)

@classmethod
def _tile_series(cls, op):
def _tile_series(cls, op: "DataFrameConcat"):
from ..datasource.from_tensor import DataFrameFromTensor
from ..indexing.iloc import SeriesIlocGetItem, DataFrameIlocGetItem

Expand Down Expand Up @@ -292,14 +237,14 @@ def _tile_series(cls, op):
)

@classmethod
def tile(cls, op):
def tile(cls, op: "DataFrameConcat"):
if isinstance(op.inputs[0], SERIES_TYPE):
return (yield from cls._tile_series(op))
else:
return (yield from cls._tile_dataframe(op))

@classmethod
def execute(cls, ctx, op):
def execute(cls, ctx, op: "DataFrameConcat"):
def _base_concat(chunk, inputs):
# auto generated concat when executing a DataFrame, Series or Index
if chunk.op.output_types[0] == OutputType.dataframe:
Expand Down Expand Up @@ -490,7 +435,7 @@ def _call_dataframes(self, objs):
else:
empty_dfs.append(build_empty_series(df.dtype, name=df.name))

emtpy_result = pd.concat(empty_dfs, join=self.join, sort=True)
emtpy_result = pd.concat(empty_dfs, join=self.join, sort=self.sort)
shape = (row_length, emtpy_result.shape[1])
columns_value = parse_index(emtpy_result.columns, store_data=True)

Expand Down
6 changes: 2 additions & 4 deletions mars/dataframe/statistics/quantile.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ def _tile_dataframe(cls, op):
stack_op = TensorStack(axis=0, dtype=dtype)
tr = stack_op(ts)
r = series_from_tensor(
tr, index=df.index_value.to_pandas(), name=np.asscalar(ts[0].op.q)
tr, index=df.index_value.to_pandas(), name=ts[0].op.q.item()
)
else:
assert op.axis == 1
Expand All @@ -286,9 +286,7 @@ def _tile_dataframe(cls, op):
interpolation=op.interpolation,
handle_non_numeric=not op.numeric_only,
)
r = series_from_tensor(
tr, index=op.input.index, name=np.asscalar(tr.op.q)
)
r = series_from_tensor(tr, index=op.input.index, name=tr.op.q.item())
else:
assert df.ndim == 2
if op.axis == 0:
Expand Down
2 changes: 1 addition & 1 deletion mars/tensor/indexing/setitem.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ def _execute_reduce(cls, ctx, op):
for axis in range(input_data.ndim):
if axis in op.shuffle_axes:
indexes.append(next(index_iter) - op.chunk_offsets[axis])
input_data[indexes] = value
input_data[tuple(indexes)] = value

ctx[op.outputs[0].key] = input_data

Expand Down

0 comments on commit cba1b43

Please sign in to comment.