from kfp import compiler
import kfp.components as comp
import kfp.dsl as dsl

# Load a reusable, pre-built component definition from its YAML spec.
# The loaded object is a factory: calling it inside a pipeline creates a task.
scikit_learn_train = comp.load_component_from_file('component.yaml')


@dsl.pipeline(
    name='Scikit-learn Trainer',
    description='Trains a Scikit-learn model')
def scikit_learn_trainer(
        training_data_path='gs://cloud-samples-data/ml-engine/iris/classification/train.csv',
        test_data_path='gs://cloud-samples-data/ml-engine/iris/classification/evaluate.csv',
        output_dir='/tmp',
        estimator_name='GradientBoostingClassifier',
        hyperparameters='n_estimators 100 max_depth 4'):
    """Pipeline with a single training step built from the loaded component.

    All parameters are pipeline parameters with string defaults; they are
    forwarded unchanged to the component's inputs.
    """
    # Use the component loaded above to create a pipeline task.
    sklearn_op = scikit_learn_train(training_data_path, test_data_path,
                                    output_dir, estimator_name,
                                    hyperparameters)


# Compile the pipeline function into an archive that can be uploaded
# through the Kubeflow Pipelines UI or client.
compiler.Compiler().compile(scikit_learn_trainer, './pipeline.zip')
接著呢,就可以看到產生的pipeline.zip了。
可以看出來,這邊我們用別人提供的component(component.yaml),建立自己要用的pipeline.zip,接著就可以透過kubeflow pipeline上傳、測試,大大節省了很多步驟,也就是plug-and-play AI components。