
feat(sdk)!: move v1 to deprecated folder #7291

Merged 46 commits on Mar 4, 2022
Commits (46). The diff below shows changes from 42 of the 46 commits.
bd37968
chore(sdk): move v1 to deprecated
ji-yaqi Feb 9, 2022
405f51c
fix testsg
ji-yaqi Feb 10, 2022
5ee1fe9
fix testsg
ji-yaqi Feb 10, 2022
e401eb0
fix setup.py
ji-yaqi Feb 10, 2022
1022c33
Merge branch 'namespace_pr' of github.com:ji-yaqi/pipelines into name…
ji-yaqi Feb 10, 2022
ed7c05f
Merge branch 'namespace_pr' of github.com:ji-yaqi/pipelines into name…
ji-yaqi Feb 10, 2022
757df75
Merge branch 'namespace_pr' of github.com:ji-yaqi/pipelines into name…
ji-yaqi Feb 10, 2022
f8d9e05
Merge branch 'namespace_pr' of github.com:ji-yaqi/pipelines into name…
ji-yaqi Feb 10, 2022
d3e3f97
Merge branch 'namespace_pr' of github.com:ji-yaqi/pipelines into name…
ji-yaqi Feb 10, 2022
9e3490d
fix test
ji-yaqi Feb 10, 2022
d39bb62
s'
ji-yaqi Feb 10, 2022
9960a2f
fix tests
ji-yaqi Feb 10, 2022
3376f46
fix test
ji-yaqi Feb 13, 2022
6a98d98
retore v2 test changes
ji-yaqi Feb 13, 2022
d5e5160
fix py3.6 test
ji-yaqi Feb 13, 2022
232567b
fix py3.6 test
ji-yaqi Feb 14, 2022
ebbc687
fix py3.6 import fallback error
ji-yaqi Feb 14, 2022
06e95ea
remove deprecated
ji-yaqi Feb 14, 2022
11f014d
fix samples test
ji-yaqi Feb 14, 2022
bd6e258
sample test
ji-yaqi Feb 15, 2022
322e8a9
fix samples
ji-yaqi Feb 16, 2022
1b9e0c2
Merge branch 'master' of github.com:ji-yaqi/pipelines into namespace_pr
ji-yaqi Feb 16, 2022
cfe4347
add readme
ji-yaqi Feb 16, 2022
fca4351
restroe test
ji-yaqi Feb 16, 2022
bede848
python require
ji-yaqi Feb 16, 2022
be082b4
remove path
ji-yaqi Feb 17, 2022
64110c5
fix tests
ji-yaqi Feb 22, 2022
d4678aa
inteegration tests
ji-yaqi Feb 22, 2022
a890239
remove tfx tests for dependency with kfp v1
ji-yaqi Feb 22, 2022
986cfb3
fix e2e
ji-yaqi Feb 22, 2022
1a2fe34
fix e2e
ji-yaqi Feb 22, 2022
c844d54
fix integration tests
ji-yaqi Feb 22, 2022
c1f7746
fix sampe
ji-yaqi Feb 22, 2022
ad3e3c2
move client down
ji-yaqi Mar 2, 2022
f05c81c
change to kfp
ji-yaqi Mar 2, 2022
b033c2a
add import alias
ji-yaqi Mar 2, 2022
5f1abef
fix
ji-yaqi Mar 2, 2022
7a9dcba
runid
ji-yaqi Mar 2, 2022
e506d9e
fix dsl
ji-yaqi Mar 3, 2022
64218af
only use kfp for function
ji-yaqi Mar 3, 2022
b439b04
revert train_until_good
ji-yaqi Mar 3, 2022
1d9ee5b
Merge branch 'master' of github.com:ji-yaqi/pipelines into namespace_pr
ji-yaqi Mar 3, 2022
5dde73e
tfx test
ji-yaqi Mar 3, 2022
d5adb28
kfp
ji-yaqi Mar 3, 2022
4abe941
try import
ji-yaqi Mar 3, 2022
e96fc17
onprem
ji-yaqi Mar 3, 2022
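Every file touched below follows the same migration: code that consumed the v1 SDK from the top-level `kfp` package now reaches the identical API through the `kfp.deprecated` namespace, either by aliasing the package (`import kfp.deprecated as kfp`) or by importing submodules from it. A minimal sketch of the pattern, using a hypothetical one-step pipeline; the component and pipeline names are illustrative, not taken from this PR:

```python
import kfp.deprecated as kfp              # was: import kfp
from kfp.deprecated import compiler, dsl  # was: from kfp import dsl / kfp.compiler


@kfp.components.create_component_from_func
def echo_op(text: str) -> str:
    """Hypothetical one-step component, not part of this PR."""
    print(text)
    return text


@dsl.pipeline(name='echo-pipeline')
def echo_pipeline(text: str = 'hello'):
    echo_op(text)


if __name__ == '__main__':
    # Same v1 compiler, now reached through the relocated namespace.
    compiler.Compiler().compile(echo_pipeline, __file__ + '.yaml')
```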
2 changes: 1 addition & 1 deletion backend/src/v2/test/sample_test.py
@@ -18,7 +18,7 @@
import json
import yaml
from kubernetes import client as k8s_client
import kfp
import kfp.deprecated as kfp

download_gcs_tgz = kfp.components.load_component_from_file(
'components/download_gcs_tgz.yaml')
@@ -29,7 +29,7 @@
"source": [
"# AutoML Tables components\n",
"\n",
"from kfp.components import load_component_from_url\n",
"from kfp.deprecated.components import load_component_from_url\n",
"\n",
"automl_create_dataset_for_tables_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b3179d86b239a08bf4884b50dbf3a9151da96d66/components/gcp/automl/create_dataset_for_tables/component.yaml')\n",
"automl_import_data_from_bigquery_source_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b3179d86b239a08bf4884b50dbf3a9151da96d66/components/gcp/automl/import_data_from_bigquery/component.yaml')\n",
@@ -47,7 +47,7 @@
"outputs": [],
"source": [
"# Define the pipeline\n",
"import kfp\n",
"import kfp.deprecated as kfp\n",
"\n",
"def retail_product_stockout_prediction_pipeline_gcs(\n",
" gcp_project_id: str,\n",
@@ -161,4 +161,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}
4 changes: 2 additions & 2 deletions samples/core/XGBoost/xgboost_sample.py
@@ -1,5 +1,5 @@
import kfp
from kfp import components
import kfp.deprecated as kfp
from kfp.deprecated import components

chicago_taxi_dataset_op = components.load_component_from_url(
'https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml'
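Component loading follows the same rule: the v1 loaders are unchanged, only their import path moves. A short sketch of the relocated loader, reusing the Chicago Taxi component URL from the sample above:

```python
from kfp.deprecated import components

# Same v1 loader as before, now resolved through the relocated package.
chicago_taxi_dataset_op = components.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml'
)
```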
2 changes: 1 addition & 1 deletion samples/core/XGBoost/xgboost_sample_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from .xgboost_sample import xgboost_pipeline
from kfp.samples.test.utils import run_pipeline_func, TestCase

14 changes: 7 additions & 7 deletions samples/core/caching/caching.ipynb
@@ -1,14 +1,18 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"kfp_endpoint = None\n",
"\n",
"import datetime\n",
"import time\n",
"\n",
"import kfp\n",
"from kfp.components import create_component_from_func\n",
"import kfp.deprecated as kfp\n",
"from kfp.deprecated.components import create_component_from_func\n",
"\n",
"\n",
"@create_component_from_func\n",
@@ -26,11 +30,7 @@
"def caching_pipeline(seconds: float = 60):\n",
" # All outputs of successful executions are cached\n",
" work_task = do_work_op(seconds)\n"
],
"cell_type": "code",
"metadata": {},
"execution_count": null,
"outputs": []
]
},
{
"cell_type": "code",
4 changes: 2 additions & 2 deletions samples/core/caching/caching_sample.py
@@ -3,8 +3,8 @@
import datetime
import time

import kfp
from kfp.components import create_component_from_func
import kfp.deprecated as kfp
from kfp.deprecated.components import create_component_from_func


@create_component_from_func
2 changes: 1 addition & 1 deletion samples/core/caching/caching_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from kfp.samples.test.utils import TestCase, relative_path, run_pipeline_func

run_pipeline_func([
8 changes: 4 additions & 4 deletions samples/core/condition/condition.py
@@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
from kfp import components
from kfp import dsl
from kfp.deprecated import components
from kfp.deprecated import dsl
from kfp.deprecated import compiler


def flip_coin(force_flip_result: str = '') -> str:
@@ -48,4 +48,4 @@ def condition(text: str = 'condition test', force_flip_result: str = ''):


if __name__ == '__main__':
kfp.compiler.Compiler().compile(condition, __file__ + '.yaml')
compiler.Compiler().compile(condition, __file__ + '.yaml')
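The control-flow samples change only their imports; the v1 `dsl.Condition` construct itself is untouched. A sketch of the migrated pattern, with hypothetical coin-flip and print components standing in for the sample's `flip_coin`:

```python
from kfp.deprecated import compiler, components, dsl


@components.create_component_from_func
def flip_coin_op() -> str:
    """Hypothetical stand-in for the sample's flip_coin component."""
    import random
    return random.choice(['heads', 'tails'])


@components.create_component_from_func
def print_op(msg: str):
    print(msg)


@dsl.pipeline(name='condition-sketch')
def condition_pipeline():
    flip = flip_coin_op()
    # v1 dsl.Condition, imported from the relocated namespace.
    with dsl.Condition(flip.output == 'heads'):
        print_op('got heads')


if __name__ == '__main__':
    compiler.Compiler().compile(condition_pipeline, __file__ + '.yaml')
```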
2 changes: 1 addition & 1 deletion samples/core/condition/condition_test.py
@@ -15,7 +15,7 @@
from __future__ import annotations

import unittest
import kfp
import kfp.deprecated as kfp
import kfp_server_api
from ml_metadata.proto import Execution
from .condition import condition
4 changes: 2 additions & 2 deletions samples/core/condition/nested_condition.py
@@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from kfp import components
from kfp import dsl
from kfp.deprecated import components
from kfp.deprecated import dsl


@components.create_component_from_func
2 changes: 1 addition & 1 deletion samples/core/condition/nested_condition_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from .nested_condition import my_pipeline
from kfp.samples.test.utils import run_pipeline_func, TestCase

@@ -10,8 +10,8 @@
kfp_endpoint=None


import kfp
from kfp import components
import kfp.deprecated as kfp
from kfp.deprecated import components


chicago_taxi_dataset_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e3337b8bdcd63636934954e592d4b32c95b49129/components/datasets/Chicago%20Taxi/component.yaml')
@@ -52,7 +52,7 @@ def continuous_training_pipeline(
table=testing_data,
transform_code='''df = df[["tips"]]''',
).set_display_name('True values').output

true_values = drop_header_op(true_values_table).output

# Getting the active prod model
@@ -82,7 +82,7 @@ def continuous_training_pipeline(
label_column=0,
).output

# Calculating the regression metrics
# Calculating the regression metrics
metrics_task = calculate_regression_metrics_from_csv_op(
true_values=true_values,
predicted_values=predictions,
@@ -106,7 +106,7 @@
with kfp.dsl.Condition(prod_model_uri != ""):
# Downloading the model
prod_model = download_from_gcs_op(prod_model_uri).output

# Training
model = xgboost_train_on_csv_op(
training_data=training_data,
@@ -123,7 +123,7 @@
label_column=0,
).output

# Calculating the regression metrics
# Calculating the regression metrics
metrics_task = calculate_regression_metrics_from_csv_op(
true_values=true_values,
predicted_values=predictions,
8 changes: 4 additions & 4 deletions samples/core/dataflow/dataflow.ipynb
@@ -125,7 +125,7 @@
},
"outputs": [],
"source": [
"import kfp.components as comp\n",
"import kfp.deprecated.components as comp\n",
"\n",
"dataflow_python_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/1.7.0-rc.3/components/gcp/dataflow/launch_python/component.yaml')"
@@ -325,8 +325,8 @@
"metadata": {},
"outputs": [],
"source": [
"import kfp\n",
"import kfp.dsl as dsl\n",
"import kfp.deprecated as kfp\n",
"from kfp.deprecated import dsl, Client\n",
"import json\n",
"@dsl.pipeline(\n",
" name='dataflow-launch-python-pipeline',\n",
@@ -392,7 +392,7 @@
}
],
"source": [
"kfp.Client().create_run_from_pipeline_func(pipeline, arguments={})"
"Client().create_run_from_pipeline_func(pipeline, arguments={})"
]
},
{
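Notebooks that submit runs switch to the relocated client in the same way: `kfp.Client()` becomes `Client` imported from `kfp.deprecated`, as in the `create_run_from_pipeline_func` cell above. A minimal sketch, assuming a reachable KFP endpoint; the host value and the pipeline are placeholders:

```python
from kfp.deprecated import Client, components, dsl


@components.create_component_from_func
def hello_op():
    """Hypothetical placeholder step."""
    print('hello')


@dsl.pipeline(name='hello-run-sketch')
def hello_pipeline():
    hello_op()


# Placeholder host; inside a KFP notebook server the argument can usually be omitted.
client = Client(host='http://localhost:8080')
client.create_run_from_pipeline_func(hello_pipeline, arguments={})
```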
2 changes: 1 addition & 1 deletion samples/core/dataflow/dataflow_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from kfp.samples.test.utils import TestCase, relative_path, run_pipeline_func

run_pipeline_func([
6 changes: 3 additions & 3 deletions samples/core/dns_config/dns_config.py
@@ -14,8 +14,8 @@
# limitations under the License.


import kfp
from kfp import dsl
import kfp.deprecated as kfp
from kfp.deprecated import dsl, compiler
from kubernetes.client.models import V1PodDNSConfig, V1PodDNSConfigOption


@@ -43,7 +43,7 @@ def dns_config_pipeline():
options=[V1PodDNSConfigOption(name="ndots", value="2")]
))

kfp.compiler.Compiler().compile(
compiler.Compiler().compile(
dns_config_pipeline,
__file__ + '.yaml',
pipeline_conf=pipeline_conf
@@ -107,7 +107,11 @@
{
"cell_type": "code",
"execution_count": 2,
"metadata": {"tags":["skip-in-test"]},
"metadata": {
"tags": [
"skip-in-test"
]
},
"outputs": [
{
"name": "stdout",
@@ -245,9 +249,10 @@
"metadata": {},
"outputs": [],
"source": [
"import kfp.components as comp\n",
"import kfp.dsl as dsl\n",
"import kfp.compiler as compiler\n",
"from kfp.deprecated import components as comp\n",
"from kfp.deprecated import dsl\n",
"from kfp.deprecated import compiler\n",
"\n",
"# The components are loaded as task factories that generate container_ops.\n",
"task_factory_a = comp.load_component_from_text(text=component_a)\n",
"task_factory_b = comp.load_component_from_text(text=component_b)\n",
@@ -350,12 +355,13 @@
}
],
"source": [
"import kfp.components as comp\n",
"import kfp.dsl as dsl\n",
"import kfp.compiler as compiler\n",
"from kfp.dsl.types import InconsistentTypeException\n",
"task_factory_a = comp.load_component_from_text(text=component_a)\n",
"task_factory_b = comp.load_component_from_text(text=component_b)\n",
"from kfp.deprecated import components\n",
"from kfp.deprecated import dsl\n",
"from kfp.deprecated import compiler\n",
"from kfp.deprecated.dsl.types import InconsistentTypeException\n",
"\n",
"task_factory_a = components.load_component_from_text(text=component_a)\n",
"task_factory_b = components.load_component_from_text(text=component_b)\n",
"\n",
"#Use the component as part of the pipeline\n",
"@dsl.pipeline(name='type-check-b',\n",
@@ -407,9 +413,9 @@
"metadata": {},
"outputs": [],
"source": [
"from kfp.dsl import component\n",
"from kfp.dsl.types import Integer, GCSPath\n",
"from kfp.dsl import ContainerOp\n",
"from kfp.deprecated.dsl import component\n",
"from kfp.deprecated.dsl.types import Integer, GCSPath\n",
"from kfp.deprecated.dsl import ContainerOp\n",
"# when components are defined based on the component decorator,\n",
"# the type information is annotated to the input or function returns.\n",
"# There are two ways to define the type: string or a dictionary with the openapi_schema_validator property\n",
@@ -497,9 +503,9 @@
"metadata": {},
"outputs": [],
"source": [
"from kfp.dsl import component\n",
"from kfp.dsl.types import Integer, GCSPath\n",
"from kfp.dsl import ContainerOp\n",
"from kfp.deprecated.dsl import component\n",
"from kfp.deprecated.dsl.types import Integer, GCSPath\n",
"from kfp.deprecated.dsl import ContainerOp\n",
"# task_factory_a outputs an input field_m with the openapi_schema_validator different\n",
"# from the task_factory_b's input field_z.\n",
"# One is gs:// and the other is gcs://\n",
@@ -620,9 +626,9 @@
"metadata": {},
"outputs": [],
"source": [
"from kfp.dsl import component\n",
"from kfp.dsl.types import Integer, GCSPath\n",
"from kfp.dsl import ContainerOp\n",
"from kfp.deprecated.dsl import component\n",
"from kfp.deprecated.dsl.types import Integer, GCSPath\n",
"from kfp.deprecated.dsl import ContainerOp\n",
"# task_factory_a lacks the type information for output filed_n\n",
"# task_factory_b lacks the type information for input field_y\n",
"# When no type information is provided, it matches all types.\n",
@@ -796,10 +802,10 @@
"pycharm": {
"stem_cell": {
"cell_type": "raw",
"source": [],
"metadata": {
"collapsed": false
}
},
"source": []
}
}
},
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from kfp.samples.test.utils import TestCase, relative_path, run_pipeline_func

run_pipeline_func([
7 changes: 3 additions & 4 deletions samples/core/execution_order/execution_order.py
@@ -14,9 +14,8 @@
# limitations under the License.


import kfp
from kfp import dsl
import kfp.components as comp
from kfp.deprecated import dsl, compiler
import kfp.deprecated.components as comp


@comp.create_component_from_func
@@ -40,4 +39,4 @@ def execution_order_pipeline(text1: str='message 1', text2: str='message 2'):
step2_task.after(step1_task)

if __name__ == '__main__':
kfp.compiler.Compiler().compile(execution_order_pipeline, __file__ + '.yaml')
compiler.Compiler().compile(execution_order_pipeline, __file__ + '.yaml')
2 changes: 1 addition & 1 deletion samples/core/execution_order/execution_order_test.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import kfp
import kfp.deprecated as kfp
from kfp.samples.test.utils import TestCase, relative_path, run_pipeline_func

run_pipeline_func([